mirror of https://github.com/TheAnachronism/docspell.git (synced 2025-02-15 20:33:26 +00:00)

commit c658677032 (parent 570b7de43f)

    Autoformat
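
Every hunk in this commit applies the same mechanical rewrite, presumably from a scalafmt run: partial-function literals that previously put the case clause on a line of its own are collapsed so the case sits on the same line as the opening brace, and a handful of doobie query fragments are re-indented. A minimal sketch of the brace-and-case rewrite (our own example, not code from the repository):

// Before the autoformat:
//   xs.map({
//     case (k, v) =>
//       s"$k=$v"
//   })
// After the autoformat:
object AutoformatExample {
  val xs = List("a" -> 1, "b" -> 2)
  val rendered: List[String] = xs.map { case (k, v) =>
    s"$k=$v"
  }
}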
@@ -26,18 +26,17 @@ object Domain {
     Tld
       .findTld(str)
       .map(tld => (str.dropRight(tld.length), tld))
-      .map({
-        case (names, tld) =>
-          names.split('.').toList match {
-            case Nil => Left(s"Not a domain: $str")
-            case segs
-                if segs.forall(label =>
-                  label.trim.nonEmpty && label
-                    .forall(c => c.isLetter || c.isDigit || c == '-')
-                ) =>
-              Right(Domain(NonEmptyList.fromListUnsafe(segs), tld))
-            case _ => Left(s"Not a domain: $str")
-          }
+      .map({ case (names, tld) =>
+        names.split('.').toList match {
+          case Nil => Left(s"Not a domain: $str")
+          case segs
+              if segs.forall(label =>
+                label.trim.nonEmpty && label
+                  .forall(c => c.isLetter || c.isDigit || c == '-')
+              ) =>
+            Right(Domain(NonEmptyList.fromListUnsafe(segs), tld))
+          case _ => Left(s"Not a domain: $str")
+        }
       })
       .getOrElse(Left(s"Not a domain $str"))
 
@@ -39,14 +39,14 @@ object DateFind {
   }
 
   private object SimpleDate {
-    val p0 = (readYear >> readMonth >> readDay).map {
-      case ((y, m), d) => SimpleDate(y, m, d)
+    val p0 = (readYear >> readMonth >> readDay).map { case ((y, m), d) =>
+      SimpleDate(y, m, d)
     }
-    val p1 = (readDay >> readMonth >> readYear).map {
-      case ((d, m), y) => SimpleDate(y, m, d)
+    val p1 = (readDay >> readMonth >> readYear).map { case ((d, m), y) =>
+      SimpleDate(y, m, d)
     }
-    val p2 = (readMonth >> readDay >> readYear).map {
-      case ((m, d), y) => SimpleDate(y, m, d)
+    val p2 = (readMonth >> readDay >> readYear).map { case ((m, d), y) =>
+      SimpleDate(y, m, d)
     }
 
     // ymd ✔, ydm, dmy ✔, dym, myd, mdy ✔
@@ -145,10 +145,9 @@ final class StanfordTextClassifier[F[_]: Sync: ContextShift](
 
   def prepend(pre: String, data: Map[String, String]): Map[String, String] =
     data.toList
-      .map({
-        case (k, v) =>
-          if (k.startsWith(pre)) (k, v)
-          else (pre + k, v)
+      .map({ case (k, v) =>
+        if (k.startsWith(pre)) (k, v)
+        else (pre + k, v)
       })
       .toMap
 }
@@ -29,9 +29,8 @@ object StanfordTextClassifierSuite extends SimpleTestSuite {
         .repeat
         .take(10)
     )
-    .flatMap({
-      case (a, b) =>
-        Stream.emits(Seq(a, b))
+    .flatMap({ case (a, b) =>
+      Stream.emits(Seq(a, b))
     })
     .covary[IO]
 
@@ -53,23 +52,22 @@ object StanfordTextClassifierSuite extends SimpleTestSuite {
       } yield (dir, blocker)
 
     things
-      .use {
-        case (dir, blocker) =>
-          val classifier = new StanfordTextClassifier[IO](cfg, blocker)
+      .use { case (dir, blocker) =>
+        val classifier = new StanfordTextClassifier[IO](cfg, blocker)
 
-          val modelFile = dir.resolve("test.ser.gz")
-          for {
-            _ <-
-              LenientUri
-                .fromJava(getClass.getResource("/test.ser.gz"))
-                .readURL[IO](4096, blocker)
-                .through(fs2.io.file.writeAll(modelFile, blocker))
-                .compile
-                .drain
-            model = ClassifierModel(modelFile)
-            cat <- classifier.classify(logger, model, "there is receipt always")
-            _ = assertEquals(cat, Some("receipt"))
-          } yield ()
+        val modelFile = dir.resolve("test.ser.gz")
+        for {
+          _ <-
+            LenientUri
+              .fromJava(getClass.getResource("/test.ser.gz"))
+              .readURL[IO](4096, blocker)
+              .through(fs2.io.file.writeAll(modelFile, blocker))
+              .compile
+              .drain
+          model = ClassifierModel(modelFile)
+          cat <- classifier.classify(logger, model, "there is receipt always")
+          _ = assertEquals(cat, Some("receipt"))
+        } yield ()
       }
       .unsafeRunSync()
   }
@@ -222,12 +222,11 @@ object LenientUri {
   def percentDecode(s: String): String =
     if (!s.contains("%")) s
     else
-      s.foldLeft(("", ByteVector.empty)) {
-        case ((acc, res), c) =>
-          if (acc.length == 2) ("", res ++ ByteVector.fromValidHex(acc.drop(1) + c))
-          else if (acc.startsWith("%")) (acc :+ c, res)
-          else if (c == '%') ("%", res)
-          else (acc, res :+ c.toByte)
+      s.foldLeft(("", ByteVector.empty)) { case ((acc, res), c) =>
+        if (acc.length == 2) ("", res ++ ByteVector.fromValidHex(acc.drop(1) + c))
+        else if (acc.startsWith("%")) (acc :+ c, res)
+        else if (c == '%') ("%", res)
+        else (acc, res :+ c.toByte)
       }._2
         .decodeUtf8
         .fold(throw _, identity)
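
The percentDecode hunk above changes only the brace style; the fold itself carries a pair (acc, res) where acc buffers a pending "%h" prefix until the second hex digit arrives and res accumulates the decoded bytes. A standalone sketch of that logic, assuming scodec-bits on the classpath (the decode name is ours, not the repository's):

import scodec.bits.ByteVector

def decode(s: String): String =
  s.foldLeft(("", ByteVector.empty)) { case ((acc, res), c) =>
    if (acc.length == 2) ("", res ++ ByteVector.fromValidHex(acc.drop(1) + c))
    else if (acc.startsWith("%")) (acc :+ c, res)
    else if (c == '%') ("%", res)
    else (acc, res :+ c.toByte)
  }._2.decodeUtf8.fold(throw _, identity)

// decode("a%20b") == "a b"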
@@ -20,9 +20,8 @@ object SystemCommand {
 
     def replace(repl: Map[String, String]): Config =
      mapArgs(s =>
-        repl.foldLeft(s) {
-          case (res, (k, v)) =>
-            res.replace(k, v)
+        repl.foldLeft(s) { case (res, (k, v)) =>
+          res.replace(k, v)
         }
       )
 
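
Config.replace in the hunk above folds every (key, value) pair of the replacement map over each argument string. A usage sketch with made-up placeholder names (not docspell's actual config keys):

val args = List("--lang", "{{lang}}", "--out", "{{outdir}}/result.pdf")
val repl = Map("{{lang}}" -> "deu", "{{outdir}}" -> "/tmp/work")

// Same fold as in Config.replace: apply every substitution to each argument.
val resolved = args.map(a => repl.foldLeft(a) { case (res, (k, v)) => res.replace(k, v) })
// resolved == List("--lang", "deu", "--out", "/tmp/work/result.pdf")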
@@ -126,10 +126,9 @@ object ConversionTest extends SimpleTestSuite with FileChecks {
     conversion.use { conv =>
       def check(n: Long): Handler[IO, Unit] =
         storePdfTxtHandler(dir.resolve(s"test-$n.pdf"), dir.resolve(s"test-$n.txt"))
-          .map {
-            case (p, t) =>
-              assert(p.isNonEmpty && p.isPDF)
-              assert(t.isNonEmpty && t.isPlainText)
+          .map { case (p, t) =>
+            assert(p.isNonEmpty && p.isPDF)
+            assert(t.isNonEmpty && t.isPlainText)
           }
 
       runConversion(pdfAndTxt, check, conv).compile.drain
@@ -165,12 +164,11 @@ object ConversionTest extends SimpleTestSuite with FileChecks {
       .emits(uris)
       .covary[IO]
       .zipWithIndex
-      .evalMap({
-        case (uri, index) =>
-          val load = uri.readURL[IO](8192, blocker)
-          val dataType = DataType.filename(uri.path.segments.last)
-          logger.info(s"Processing file ${uri.path.asString}") *>
-            conv.toPDF(dataType, Language.German, handler(index))(load)
+      .evalMap({ case (uri, index) =>
+        val load = uri.readURL[IO](8192, blocker)
+        val dataType = DataType.filename(uri.path.segments.last)
+        logger.info(s"Processing file ${uri.path.asString}") *>
+          conv.toPDF(dataType, Language.German, handler(index))(load)
       })
 
   def commandsExist: Boolean =
@@ -44,14 +44,14 @@ object PoiExtract {
       getDocx(data)
     case PoiType.msoffice =>
       EitherT(getDoc[F](data))
-        .recoverWith({
-          case _ => EitherT(getXls[F](data))
+        .recoverWith({ case _ =>
+          EitherT(getXls[F](data))
         })
         .value
     case PoiType.ooxml =>
       EitherT(getDocx[F](data))
-        .recoverWith({
-          case _ => EitherT(getXlsx[F](data))
+        .recoverWith({ case _ =>
+          EitherT(getXlsx[F](data))
         })
         .value
     case mt =>
@@ -14,15 +14,14 @@ object OdfExtractTest extends SimpleTestSuite {
   )
 
   test("test extract from odt") {
-    files.foreach {
-      case (file, len) =>
-        val is = file.toJavaUrl.map(_.openStream()).fold(sys.error, identity)
-        val str1 = OdfExtract.get(is).fold(throw _, identity)
-        assertEquals(str1.length, len)
+    files.foreach { case (file, len) =>
+      val is = file.toJavaUrl.map(_.openStream()).fold(sys.error, identity)
+      val str1 = OdfExtract.get(is).fold(throw _, identity)
+      assertEquals(str1.length, len)
 
-        val data = file.readURL[IO](8192, blocker)
-        val str2 = OdfExtract.get[IO](data).unsafeRunSync().fold(throw _, identity)
-        assertEquals(str2, str1)
+      val data = file.readURL[IO](8192, blocker)
+      val str2 = OdfExtract.get[IO](data).unsafeRunSync().fold(throw _, identity)
+      assertEquals(str2, str1)
     }
   }
 
@@ -14,24 +14,22 @@ object PdfboxExtractTest extends SimpleTestSuite {
   )
 
   test("extract text from text PDFs by inputstream") {
-    textPDFs.foreach {
-      case (file, txt) =>
-        val url = file.toJavaUrl.fold(sys.error, identity)
-        val str = PdfboxExtract.getText(url.openStream()).fold(throw _, identity)
-        val received = removeFormatting(str.value)
-        val expect = removeFormatting(txt)
-        assertEquals(received, expect)
+    textPDFs.foreach { case (file, txt) =>
+      val url = file.toJavaUrl.fold(sys.error, identity)
+      val str = PdfboxExtract.getText(url.openStream()).fold(throw _, identity)
+      val received = removeFormatting(str.value)
+      val expect = removeFormatting(txt)
+      assertEquals(received, expect)
     }
   }
 
   test("extract text from text PDFs via Stream") {
-    textPDFs.foreach {
-      case (file, txt) =>
-        val data = file.readURL[IO](8192, blocker)
-        val str = PdfboxExtract.getText(data).unsafeRunSync().fold(throw _, identity)
-        val received = removeFormatting(str.value)
-        val expect = removeFormatting(txt)
-        assertEquals(received, expect)
+    textPDFs.foreach { case (file, txt) =>
+      val data = file.readURL[IO](8192, blocker)
+      val str = PdfboxExtract.getText(data).unsafeRunSync().fold(throw _, identity)
+      val received = removeFormatting(str.value)
+      val expect = removeFormatting(txt)
+      assertEquals(received, expect)
     }
   }
 
@@ -17,23 +17,22 @@ object PoiExtractTest extends SimpleTestSuite {
   )
 
   test("extract text from ms office files") {
-    officeFiles.foreach {
-      case (file, len) =>
-        val str1 = PoiExtract
-          .get[IO](file.readURL[IO](8192, blocker), MimeTypeHint.none)
-          .unsafeRunSync()
-          .fold(throw _, identity)
+    officeFiles.foreach { case (file, len) =>
+      val str1 = PoiExtract
+        .get[IO](file.readURL[IO](8192, blocker), MimeTypeHint.none)
+        .unsafeRunSync()
+        .fold(throw _, identity)
 
-        val str2 = PoiExtract
-          .get[IO](
-            file.readURL[IO](8192, blocker),
-            MimeTypeHint(Some(file.path.segments.last), None)
-          )
-          .unsafeRunSync()
-          .fold(throw _, identity)
+      val str2 = PoiExtract
+        .get[IO](
+          file.readURL[IO](8192, blocker),
+          MimeTypeHint(Some(file.path.segments.last), None)
+        )
+        .unsafeRunSync()
+        .fold(throw _, identity)
 
-        assertEquals(str1, str2)
-        assertEquals(str1.length, len)
+      assertEquals(str1, str2)
+      assertEquals(str1.length, len)
     }
   }
 }
@@ -25,22 +25,20 @@ object ImageSizeTest extends SimpleTestSuite {
   )
 
   test("get sizes from input-stream") {
-    files.foreach {
-      case (uri, expect) =>
-        val url = uri.toJavaUrl.fold(sys.error, identity)
-        Using.resource(url.openStream()) { in =>
-          val dim = ImageSize.get(in)
-          assertEquals(dim, expect.some)
-        }
+    files.foreach { case (uri, expect) =>
+      val url = uri.toJavaUrl.fold(sys.error, identity)
+      Using.resource(url.openStream()) { in =>
+        val dim = ImageSize.get(in)
+        assertEquals(dim, expect.some)
+      }
     }
   }
 
   test("get sizes from stream") {
-    files.foreach {
-      case (uri, expect) =>
-        val stream = uri.readURL[IO](8192, blocker)
-        val dim = ImageSize.get(stream).unsafeRunSync()
-        assertEquals(dim, expect.some)
+    files.foreach { case (uri, expect) =>
+      val stream = uri.readURL[IO](8192, blocker)
+      val dim = ImageSize.get(stream).unsafeRunSync()
+      assertEquals(dim, expect.some)
     }
   }
 }
@@ -59,9 +59,9 @@ object Migration {
                 s"Applying index migration ${m.version}/${m.description} failed"
               ) *>
               ctx.store.transact(RFtsMigration.deleteById(rec.id)) *> Effect[F]
-              .raiseError[Unit](
-                ex
-              )
+                .raiseError[Unit](
+                  ex
+                )
           })
       } yield ret).getOrElseF(
         ctx.logger.info(s"Migration ${m.version}/${m.description} already applied.")
@@ -41,20 +41,19 @@ object CreateItem {
         .flatMap(f => ctx.store.bitpeace.get(f.fileMetaId.id).map(fm => (f, fm)))
         .collect({ case (f, Some(fm)) if isValidFile(fm) => f })
         .zipWithIndex
-        .evalMap({
-          case (f, index) =>
-            Ident
-              .randomId[F]
-              .map(id =>
-                RAttachment(
-                  id,
-                  itemId,
-                  f.fileMetaId,
-                  index.toInt + offset,
-                  now,
-                  f.name
-                )
+        .evalMap({ case (f, index) =>
+          Ident
+            .randomId[F]
+            .map(id =>
+              RAttachment(
+                id,
+                itemId,
+                f.fileMetaId,
+                index.toInt + offset,
+                now,
+                f.name
+              )
             )
         })
     }
     .compile
@@ -82,10 +82,9 @@ object FindProposal {
   def removeDuplicates(labels: List[NerLabel]): List[NerLabel] =
     labels
       .sortBy(_.startPosition)
-      .foldLeft((Set.empty[String], List.empty[NerLabel])) {
-        case ((seen, result), el) =>
-          if (seen.contains(el.tag.name + el.label.toLowerCase)) (seen, result)
-          else (seen + (el.tag.name + el.label.toLowerCase), el :: result)
+      .foldLeft((Set.empty[String], List.empty[NerLabel])) { case ((seen, result), el) =>
+        if (seen.contains(el.tag.name + el.label.toLowerCase)) (seen, result)
+        else (seen + (el.tag.name + el.label.toLowerCase), el :: result)
       }
       ._2
 
@@ -14,17 +14,16 @@ object InfoRoutes {
   def apply[F[_]: Sync](): HttpRoutes[F] = {
     val dsl = new Http4sDsl[F] {}
     import dsl._
-    HttpRoutes.of[F] {
-      case GET -> (Root / "version") =>
-        Ok(
-          VersionInfo(
-            BuildInfo.version,
-            BuildInfo.builtAtMillis,
-            BuildInfo.builtAtString,
-            BuildInfo.gitHeadCommit.getOrElse(""),
-            BuildInfo.gitDescribedVersion.getOrElse("")
-          )
+    HttpRoutes.of[F] { case GET -> (Root / "version") =>
+      Ok(
+        VersionInfo(
+          BuildInfo.version,
+          BuildInfo.builtAtMillis,
+          BuildInfo.builtAtString,
+          BuildInfo.gitHeadCommit.getOrElse(""),
+          BuildInfo.gitDescribedVersion.getOrElse("")
+        )
       )
     }
   }
 }
@@ -46,9 +46,8 @@ object Config {
       pattern == ip || (inet.isLoopbackAddress && pattern == "127.0.0.1") || (pattern
         .split('.')
        .zip(ipParts)
-        .foldLeft(true) {
-          case (r, (a, b)) =>
-            r && (a == "*" || a == b)
+        .foldLeft(true) { case (r, (a, b)) =>
+          r && (a == "*" || a == b)
         })
 
     ips.exists(checkSingle)
@@ -99,13 +99,12 @@ object RestServer {
     val dsl = new Http4sDsl[F] {}
     import dsl._
 
-    HttpRoutes.of {
-      case GET -> Root =>
-        Response[F](
-          Status.SeeOther,
-          body = Stream.empty,
-          headers = Headers.of(Location(Uri(path = path)))
-        ).pure[F]
+    HttpRoutes.of { case GET -> Root =>
+      Response[F](
+        Status.SeeOther,
+        body = Stream.empty,
+        headers = Headers.of(Location(Uri(path = path)))
+      ).pure[F]
     }
   }
 }
@@ -441,9 +441,8 @@ trait Conversions {
       oid: Option[Ident],
       pid: Option[Ident]
   ): F[RContact] =
-    timeId.map {
-      case (id, now) =>
-        RContact(id, c.value, c.kind, pid, oid, now)
+    timeId.map { case (id, now) =>
+      RContact(id, c.value, c.kind, pid, oid, now)
     }
 
   // users
@@ -460,19 +459,18 @@ trait Conversions {
     )
 
   def newUser[F[_]: Sync](u: User, cid: Ident): F[RUser] =
-    timeId.map {
-      case (id, now) =>
-        RUser(
-          id,
-          u.login,
-          cid,
-          u.password.getOrElse(Password.empty),
-          u.state,
-          u.email,
-          0,
-          None,
-          now
-        )
+    timeId.map { case (id, now) =>
+      RUser(
+        id,
+        u.login,
+        cid,
+        u.password.getOrElse(Password.empty),
+        u.state,
+        u.email,
+        0,
+        None,
+        now
+      )
     }
 
   def changeUser(u: User, cid: Ident): RUser =
@@ -494,9 +492,8 @@ trait Conversions {
     Tag(rt.tagId, rt.name, rt.category, rt.created)
 
   def newTag[F[_]: Sync](t: Tag, cid: Ident): F[RTag] =
-    timeId.map {
-      case (id, now) =>
-        RTag(id, cid, t.name, t.category, now)
+    timeId.map { case (id, now) =>
+      RTag(id, cid, t.name, t.category, now)
     }
 
   def changeTag(t: Tag, cid: Ident): RTag =
@@ -517,9 +514,8 @@ trait Conversions {
     )
 
   def newSource[F[_]: Sync](s: Source, cid: Ident): F[RSource] =
-    timeId.map({
-      case (id, now) =>
-        RSource(id, cid, s.abbrev, s.description, 0, s.enabled, s.priority, now, s.folder)
+    timeId.map({ case (id, now) =>
+      RSource(id, cid, s.abbrev, s.description, 0, s.enabled, s.priority, now, s.folder)
     })
 
   def changeSource[F[_]: Sync](s: Source, coll: Ident): RSource =
@@ -540,9 +536,8 @@ trait Conversions {
     Equipment(re.eid, re.name, re.created)
 
   def newEquipment[F[_]: Sync](e: Equipment, cid: Ident): F[REquipment] =
-    timeId.map({
-      case (id, now) =>
-        REquipment(id, cid, e.name, now, now)
+    timeId.map({ case (id, now) =>
+      REquipment(id, cid, e.name, now, now)
     })
 
   def changeEquipment[F[_]: Sync](e: Equipment, cid: Ident): F[REquipment] =
@@ -18,13 +18,12 @@ object CalEventCheckRoutes {
     val dsl = new Http4sDsl[F] {}
     import dsl._
 
-    HttpRoutes.of {
-      case req @ POST -> Root =>
-        for {
-          data <- req.as[CalEventCheck]
-          res <- testEvent(data.event)
-          resp <- Ok(res)
-        } yield resp
+    HttpRoutes.of { case req @ POST -> Root =>
+      for {
+        data <- req.as[CalEventCheck]
+        res <- testEvent(data.event)
+        resp <- Ok(res)
+      } yield resp
     }
   }
 
@@ -20,13 +20,12 @@ object CheckFileRoutes {
     val dsl = new Http4sDsl[F] with ResponseGenerator[F] {}
     import dsl._
 
-    HttpRoutes.of {
-      case GET -> Root / checksum =>
-        for {
-          items <-
-            backend.itemSearch.findByFileCollective(checksum, user.account.collective)
-          resp <- Ok(convert(items))
-        } yield resp
+    HttpRoutes.of { case GET -> Root / checksum =>
+      for {
+        items <-
+          backend.itemSearch.findByFileCollective(checksum, user.account.collective)
+        resp <- Ok(convert(items))
+      } yield resp
 
     }
   }
@@ -35,12 +34,11 @@ object CheckFileRoutes {
     val dsl = new Http4sDsl[F] with ResponseGenerator[F] {}
     import dsl._
 
-    HttpRoutes.of {
-      case GET -> Root / Ident(id) / checksum =>
-        for {
-          items <- backend.itemSearch.findByFileSource(checksum, id)
-          resp <- Ok(convert(items))
-        } yield resp
+    HttpRoutes.of { case GET -> Root / Ident(id) / checksum =>
+      for {
+        items <- backend.itemSearch.findByFileSource(checksum, id)
+        resp <- Ok(convert(items))
+      } yield resp
     }
   }
 
@@ -26,13 +26,11 @@ object FullTextIndexRoutes {
       val dsl = Http4sDsl[F]
      import dsl._
 
-      HttpRoutes.of {
-        case POST -> Root / "reIndex" =>
-          for {
-            res <- backend.fulltext.reindexCollective(user.account).attempt
-            resp <-
-              Ok(Conversions.basicResult(res, "Full-text index will be re-created."))
-          } yield resp
+      HttpRoutes.of { case POST -> Root / "reIndex" =>
+        for {
+          res <- backend.fulltext.reindexCollective(user.account).attempt
+          resp <- Ok(Conversions.basicResult(res, "Full-text index will be re-created."))
+        } yield resp
       }
     }
 
@@ -42,16 +40,14 @@ object FullTextIndexRoutes {
       val dsl = Http4sDsl[F]
       import dsl._
 
-      HttpRoutes.of {
-        case POST -> Root / "reIndexAll" / Ident(id) =>
-          for {
-            res <-
-              if (id.nonEmpty && id == cfg.fullTextSearch.recreateKey)
-                backend.fulltext.reindexAll.attempt
-              else Left(new Exception("The provided key is invalid.")).pure[F]
-            resp <-
-              Ok(Conversions.basicResult(res, "Full-text index will be re-created."))
-          } yield resp
+      HttpRoutes.of { case POST -> Root / "reIndexAll" / Ident(id) =>
+        for {
+          res <-
+            if (id.nonEmpty && id == cfg.fullTextSearch.recreateKey)
+              backend.fulltext.reindexAll.attempt
+            else Left(new Exception("The provided key is invalid.")).pure[F]
+          resp <- Ok(Conversions.basicResult(res, "Full-text index will be re-created."))
+        } yield resp
       }
     }
 
@@ -14,17 +14,16 @@ object InfoRoutes {
   def apply[F[_]: Sync](): HttpRoutes[F] = {
     val dsl = new Http4sDsl[F] {}
     import dsl._
-    HttpRoutes.of[F] {
-      case GET -> (Root / "version") =>
-        Ok(
-          VersionInfo(
-            BuildInfo.version,
-            BuildInfo.builtAtMillis,
-            BuildInfo.builtAtString,
-            BuildInfo.gitHeadCommit.getOrElse(""),
-            BuildInfo.gitDescribedVersion.getOrElse("")
-          )
+    HttpRoutes.of[F] { case GET -> (Root / "version") =>
+      Ok(
+        VersionInfo(
+          BuildInfo.version,
+          BuildInfo.builtAtMillis,
+          BuildInfo.builtAtString,
+          BuildInfo.gitHeadCommit.getOrElse(""),
+          BuildInfo.gitDescribedVersion.getOrElse("")
+        )
       )
     }
   }
 }
@@ -19,13 +19,12 @@ object LoginRoutes {
     val dsl: Http4sDsl[F] = new Http4sDsl[F] {}
     import dsl._
 
-    HttpRoutes.of[F] {
-      case req @ POST -> Root / "login" =>
-        for {
-          up <- req.as[UserPass]
-          res <- S.loginUserPass(cfg.auth)(Login.UserPass(up.account, up.password))
-          resp <- makeResponse(dsl, cfg, res, up.account)
-        } yield resp
+    HttpRoutes.of[F] { case req @ POST -> Root / "login" =>
+      for {
+        up <- req.as[UserPass]
+        res <- S.loginUserPass(cfg.auth)(Login.UserPass(up.account, up.password))
+        resp <- makeResponse(dsl, cfg, res, up.account)
+      } yield resp
     }
   }
 
@@ -23,17 +23,16 @@ object MailSendRoutes {
     val dsl = new Http4sDsl[F] {}
     import dsl._
 
-    HttpRoutes.of {
-      case req @ POST -> Root / Ident(name) / Ident(id) =>
-        for {
-          in <- req.as[SimpleMail]
-          mail = convertIn(id, in)
-          res <- mail.traverse(m => backend.mail.sendMail(user.account, name, m))
-          resp <- res.fold(
-            err => Ok(BasicResult(false, s"Invalid mail data: $err")),
-            res => Ok(convertOut(res))
-          )
-        } yield resp
+    HttpRoutes.of { case req @ POST -> Root / Ident(name) / Ident(id) =>
+      for {
+        in <- req.as[SimpleMail]
+        mail = convertIn(id, in)
+        res <- mail.traverse(m => backend.mail.sendMail(user.account, name, m))
+        resp <- res.fold(
+          err => Ok(BasicResult(false, s"Invalid mail data: $err")),
+          res => Ok(convertOut(res))
+        )
+      } yield resp
     }
   }
 
@@ -40,20 +40,18 @@ object TemplateRoutes {
     import dsl._
     new InnerRoutes[F] {
       def doc =
-        HttpRoutes.of[F] {
-          case GET -> Root =>
-            for {
-              templ <- docTemplate
-              resp <- Ok(DocData().render(templ), `Content-Type`(`text/html`))
-            } yield resp
+        HttpRoutes.of[F] { case GET -> Root =>
+          for {
+            templ <- docTemplate
+            resp <- Ok(DocData().render(templ), `Content-Type`(`text/html`))
+          } yield resp
         }
       def app =
-        HttpRoutes.of[F] {
-          case GET -> _ =>
-            for {
-              templ <- indexTemplate
-              resp <- Ok(IndexData(cfg).render(templ), `Content-Type`(`text/html`))
-            } yield resp
+        HttpRoutes.of[F] { case GET -> _ =>
+          for {
+            templ <- indexTemplate
+            resp <- Ok(IndexData(cfg).render(templ), `Content-Type`(`text/html`))
+          } yield resp
         }
     }
   }
@@ -56,8 +56,8 @@ trait DoobieSyntax {
   def insertRows(table: Fragment, cols: List[Column], vals: List[Fragment]): Fragment =
     Fragment.const("INSERT INTO ") ++ table ++ Fragment.const(" (") ++
       commas(cols.map(_.f)) ++ Fragment.const(") VALUES ") ++ commas(
-      vals.map(f => sql"(" ++ f ++ sql")")
-    )
+        vals.map(f => sql"(" ++ f ++ sql")")
+      )
 
   def selectSimple(cols: Seq[Column], table: Fragment, where: Fragment): Fragment =
     selectSimple(commas(cols.map(_.f)), table, where)
@@ -103,11 +103,11 @@ object QAttachment {
         .prefix("m")
         .f ++ fr"FROM" ++ RAttachmentMeta.table ++ fr"m" ++
       fr"INNER JOIN" ++ RAttachment.table ++ fr"a ON" ++ AC.id
-      .prefix("a")
-      .is(MC.id.prefix("m")) ++
+        .prefix("a")
+        .is(MC.id.prefix("m")) ++
       fr"INNER JOIN" ++ RItem.table ++ fr"i ON" ++ AC.itemId
-      .prefix("a")
-      .is(IC.id.prefix("i")) ++
+        .prefix("a")
+        .is(IC.id.prefix("i")) ++
       fr"WHERE" ++ and(AC.itemId.prefix("a").is(itemId), IC.cid.prefix("i").is(coll))
 
     for {
@@ -128,15 +128,15 @@ object QAttachment {
         MC.all.map(_.prefix("m").f)
       ) ++ fr"FROM" ++ RItem.table ++ fr"i" ++
       fr"INNER JOIN" ++ RAttachment.table ++ fr"a ON" ++ IC.id
-      .prefix("i")
-      .is(AC.itemId.prefix("a")) ++
+        .prefix("i")
+        .is(AC.itemId.prefix("a")) ++
       fr"INNER JOIN" ++ RAttachmentMeta.table ++ fr"m ON" ++ AC.id
-      .prefix("a")
-      .is(MC.id.prefix("m")) ++
+        .prefix("a")
+        .is(MC.id.prefix("m")) ++
       fr"WHERE" ++ and(
-      AC.id.prefix("a").is(attachId),
-      IC.cid.prefix("i").is(collective)
-    )
+        AC.id.prefix("a").is(attachId),
+        IC.cid.prefix("i").is(collective)
+      )
 
     q.query[RAttachmentMeta].option
   }
@@ -82,14 +82,14 @@ object QCollective {
       ) ++
       fr"FROM" ++ RTagItem.table ++ fr"r" ++
       fr"INNER JOIN" ++ RTag.table ++ fr"t ON" ++ RC.tagId
-      .prefix("r")
-      .is(TC.tid.prefix("t")) ++
+        .prefix("r")
+        .is(TC.tid.prefix("t")) ++
       fr"WHERE" ++ TC.cid.prefix("t").is(coll) ++
       fr"GROUP BY" ++ commas(
-      TC.name.prefix("t").f,
-      TC.tid.prefix("t").f,
-      TC.category.prefix("t").f
-    )
+        TC.name.prefix("t").f,
+        TC.tid.prefix("t").f,
+        TC.category.prefix("t").f
+      )
 
     q3.query[TagCount].to[List]
   }
@@ -94,23 +94,23 @@ object QItem {
         Fragment.empty
       ) ++
       fr"LEFT JOIN" ++ ROrganization.table ++ fr"o ON" ++ RItem.Columns.corrOrg
-      .prefix("i")
-      .is(ROrganization.Columns.oid.prefix("o")) ++
+        .prefix("i")
+        .is(ROrganization.Columns.oid.prefix("o")) ++
       fr"LEFT JOIN" ++ RPerson.table ++ fr"p0 ON" ++ RItem.Columns.corrPerson
-      .prefix("i")
-      .is(RPerson.Columns.pid.prefix("p0")) ++
+        .prefix("i")
+        .is(RPerson.Columns.pid.prefix("p0")) ++
       fr"LEFT JOIN" ++ RPerson.table ++ fr"p1 ON" ++ RItem.Columns.concPerson
-      .prefix("i")
-      .is(RPerson.Columns.pid.prefix("p1")) ++
+        .prefix("i")
+        .is(RPerson.Columns.pid.prefix("p1")) ++
       fr"LEFT JOIN" ++ REquipment.table ++ fr"e ON" ++ RItem.Columns.concEquipment
-      .prefix("i")
-      .is(REquipment.Columns.eid.prefix("e")) ++
+        .prefix("i")
+        .is(REquipment.Columns.eid.prefix("e")) ++
       fr"LEFT JOIN" ++ RItem.table ++ fr"ref ON" ++ RItem.Columns.inReplyTo
-      .prefix("i")
-      .is(RItem.Columns.id.prefix("ref")) ++
+        .prefix("i")
+        .is(RItem.Columns.id.prefix("ref")) ++
       fr"LEFT JOIN" ++ RFolder.table ++ fr"f ON" ++ RItem.Columns.folder
-      .prefix("i")
-      .is(RFolder.Columns.id.prefix("f")) ++
+        .prefix("i")
+        .is(RFolder.Columns.id.prefix("f")) ++
      fr"WHERE" ++ RItem.Columns.id.prefix("i").is(id)
 
     val q = cq
@@ -312,8 +312,8 @@ object QItem {
       fr"LEFT JOIN orgs o0 ON" ++ IC.corrOrg.prefix("i").is(OC.oid.prefix("o0")) ++
       fr"LEFT JOIN persons p1 ON" ++ IC.concPerson.prefix("i").is(PC.pid.prefix("p1")) ++
       fr"LEFT JOIN equips e1 ON" ++ IC.concEquipment
-      .prefix("i")
-      .is(EC.eid.prefix("e1")) ++
+        .prefix("i")
+        .is(EC.eid.prefix("e1")) ++
       fr"LEFT JOIN folders f1 ON" ++ IC.folder.prefix("i").is(FC.id.prefix("f1"))
   }
 
@@ -45,10 +45,9 @@ object QOrganization {
       .query[(ROrganization, Option[RContact])]
       .stream
       .groupAdjacentBy(_._1)
-      .map({
-        case (ro, chunk) =>
-          val cs = chunk.toVector.flatMap(_._2)
-          (ro, cs)
+      .map({ case (ro, chunk) =>
+        val cs = chunk.toVector.flatMap(_._2)
+        (ro, cs)
       })
   }
 
@@ -71,10 +70,9 @@ object QOrganization {
       .query[(ROrganization, Option[RContact])]
       .stream
       .groupAdjacentBy(_._1)
-      .map({
-        case (ro, chunk) =>
-          val cs = chunk.toVector.flatMap(_._2)
-          (ro, cs)
+      .map({ case (ro, chunk) =>
+        val cs = chunk.toVector.flatMap(_._2)
+        (ro, cs)
       })
       .compile
       .last
@@ -109,10 +107,9 @@ object QOrganization {
      .query[(RPerson, Option[RContact])]
       .stream
       .groupAdjacentBy(_._1)
-      .map({
-        case (ro, chunk) =>
-          val cs = chunk.toVector.flatMap(_._2)
-          (ro, cs)
+      .map({ case (ro, chunk) =>
+        val cs = chunk.toVector.flatMap(_._2)
+        (ro, cs)
       })
   }
 
@@ -135,10 +132,9 @@ object QOrganization {
       .query[(RPerson, Option[RContact])]
       .stream
       .groupAdjacentBy(_._1)
-      .map({
-        case (ro, chunk) =>
-          val cs = chunk.toVector.flatMap(_._2)
-          (ro, cs)
+      .map({ case (ro, chunk) =>
+        val cs = chunk.toVector.flatMap(_._2)
+        (ro, cs)
       })
       .compile
       .last
|
@ -152,8 +152,8 @@ object RAttachment {
|
||||
): ConnectionIO[Vector[RAttachment]] = {
|
||||
val q = selectSimple(all.map(_.prefix("a")), table ++ fr"a", Fragment.empty) ++
|
||||
fr"INNER JOIN" ++ RItem.table ++ fr"i ON" ++ RItem.Columns.id
|
||||
.prefix("i")
|
||||
.is(itemId.prefix("a")) ++
|
||||
.prefix("i")
|
||||
.is(itemId.prefix("a")) ++
|
||||
fr"WHERE" ++ and(itemId.prefix("a").is(id), RItem.Columns.cid.prefix("i").is(coll))
|
||||
q.query[RAttachment].to[Vector]
|
||||
}
|
||||
|
@@ -106,13 +106,13 @@ object ROrganization {
     val q = fr"SELECT DISTINCT" ++ commas(oid.prefix("o").f, name.prefix("o").f) ++
       fr"FROM" ++ table ++ fr"o" ++
       fr"INNER JOIN" ++ RContact.table ++ fr"c ON" ++ CC.orgId
-      .prefix("c")
-      .is(oid.prefix("o")) ++
+        .prefix("c")
+        .is(oid.prefix("o")) ++
       fr"WHERE" ++ and(
-      cid.prefix("o").is(coll),
-      CC.kind.prefix("c").is(contactKind),
-      CC.value.prefix("c").lowerLike(value)
-    )
+        cid.prefix("o").is(coll),
+        CC.kind.prefix("c").is(contactKind),
+        CC.value.prefix("c").lowerLike(value)
+      )
 
     q.query[IdRef].to[Vector]
   }
@@ -128,14 +128,14 @@ object RPerson {
     val q = fr"SELECT DISTINCT" ++ commas(pid.prefix("p").f, name.prefix("p").f) ++
      fr"FROM" ++ table ++ fr"p" ++
       fr"INNER JOIN" ++ RContact.table ++ fr"c ON" ++ CC.personId
-      .prefix("c")
-      .is(pid.prefix("p")) ++
+        .prefix("c")
+        .is(pid.prefix("p")) ++
       fr"WHERE" ++ and(
-      cid.prefix("p").is(coll),
-      CC.kind.prefix("c").is(contactKind),
-      concerning.prefix("p").is(concerningOnly),
-      CC.value.prefix("c").lowerLike(value)
-    )
+        cid.prefix("p").is(coll),
+        CC.kind.prefix("c").is(contactKind),
+        concerning.prefix("p").is(concerningOnly),
+        CC.value.prefix("c").lowerLike(value)
+      )
 
     q.query[IdRef].to[Vector]
   }