scalafmtAll

Eike Kettner 2020-03-26 18:26:00 +01:00
parent 09ea724c13
commit 9656ba62f4
91 changed files with 871 additions and 295 deletions
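
The commit title refers to the sbt-scalafmt task that reformats main and test sources across all modules (run as: sbt scalafmtAll). The project's actual .scalafmt.conf is not part of this diff; the sketch below only illustrates the kind of settings that would produce the rewrapping seen in the hunks, and both values are assumptions inferred from the output, not taken from the repository:

    version = "2.4.2"  // assumed scalafmt version for early 2020, not shown in this commit
    maxColumn = 90     // assumed: long expressions below are broken at roughly this width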

View File

@ -1,5 +1,8 @@
<img align="right" src="./artwork/logo-only.svg" height="150px" style="padding-left: 20px"/>
[![Build Status](https://travis-ci.org/eikek/docspell.svg?branch=master)](https://travis-ci.org/eikek/docspell)
[![Scala Steward badge](https://img.shields.io/badge/Scala_Steward-helping-blue.svg?style=flat&logo=data:image/png;base64,iVBORw0KGgoAAAANSUhEUgAAAA4AAAAQCAMAAAARSr4IAAAAVFBMVEUAAACHjojlOy5NWlrKzcYRKjGFjIbp293YycuLa3pYY2LSqql4f3pCUFTgSjNodYRmcXUsPD/NTTbjRS+2jomhgnzNc223cGvZS0HaSD0XLjbaSjElhIr+AAAAAXRSTlMAQObYZgAAAHlJREFUCNdNyosOwyAIhWHAQS1Vt7a77/3fcxxdmv0xwmckutAR1nkm4ggbyEcg/wWmlGLDAA3oL50xi6fk5ffZ3E2E3QfZDCcCN2YtbEWZt+Drc6u6rlqv7Uk0LdKqqr5rk2UCRXOk0vmQKGfc94nOJyQjouF9H/wCc9gECEYfONoAAAAASUVORK5CYII=)](https://scala-steward.org)
# Docspell

View File

@ -31,7 +31,8 @@ object Domain {
case Nil => Left(s"Not a domain: $str")
case segs
if segs.forall(label =>
label.trim.nonEmpty && label.forall(c => c.isLetter || c.isDigit || c == '-')
label.trim.nonEmpty && label
.forall(c => c.isLetter || c.isDigit || c == '-')
) =>
Right(Domain(NonEmptyList.fromListUnsafe(segs), tld))
case _ => Left(s"Not a domain: $str")

View File

@ -21,7 +21,12 @@ object DateFind {
.map(sd =>
NerDateLabel(
sd.toLocalDate,
NerLabel(text.substring(q.head.begin, q(2).end), NerTag.Date, q.head.begin, q(1).end)
NerLabel(
text.substring(q.head.begin, q(2).end),
NerTag.Date,
q.head.begin,
q(1).end
)
)
)
)
@ -62,7 +67,9 @@ object DateFind {
)
def readMonth: Reader[Int] =
Reader.readFirst(w => Some(months.indexWhere(_.contains(w.value))).filter(_ > 0).map(_ + 1))
Reader.readFirst(w =>
Some(months.indexWhere(_.contains(w.value))).filter(_ > 0).map(_ + 1)
)
def readDay: Reader[Int] =
Reader.readFirst(w => Try(w.value.toInt).filter(n => n > 0 && n <= 31).toOption)
@ -89,8 +96,9 @@ object DateFind {
def readFirst[A](f: Word => Option[A]): Reader[A] =
Reader({
case Nil => Result.Failure
case a :: as => f(a).map(value => Result.Success(value, as)).getOrElse(Result.Failure)
case Nil => Result.Failure
case a :: as =>
f(a).map(value => Result.Success(value, as)).getOrElse(Result.Failure)
})
}

View File

@ -57,7 +57,9 @@ object StanfordNerClassifier {
"/edu/stanford/nlp/models/ner/german.conll.germeval2014.hgc_175m_600.crf.ser.gz"
)
case Language.English =>
getClass.getResource("/edu/stanford/nlp/models/ner/english.all.3class.distsim.crf.ser.gz")
getClass.getResource(
"/edu/stanford/nlp/models/ner/english.all.3class.distsim.crf.ser.gz"
)
})
}
}

View File

@ -7,7 +7,8 @@ import docspell.common._
object TextAnalyserSuite extends SimpleTestSuite {
test("find english ner labels") {
val labels = StanfordNerClassifier.nerAnnotate(Language.English)(TestFiles.letterENText)
val labels =
StanfordNerClassifier.nerAnnotate(Language.English)(TestFiles.letterENText)
val expect = Vector(
NerLabel("Derek", NerTag.Person, 0, 5),
NerLabel("Jeter", NerTag.Person, 6, 11),
@ -34,7 +35,8 @@ object TextAnalyserSuite extends SimpleTestSuite {
}
test("find german ner labels") {
val labels = StanfordNerClassifier.nerAnnotate(Language.German)(TestFiles.letterDEText)
val labels =
StanfordNerClassifier.nerAnnotate(Language.German)(TestFiles.letterDEText)
val expect = Vector(
NerLabel("Max", NerTag.Person, 0, 3),
NerLabel("Mustermann", NerTag.Person, 4, 14),

View File

@ -75,6 +75,7 @@ object AuthToken {
Either.catchNonFatal(s.toLong).toOption
private def constTimeEq(s1: String, s2: String): Boolean =
s1.zip(s2).foldLeft(true)({ case (r, (c1, c2)) => r & c1 == c2 }) & s1.length == s2.length
s1.zip(s2)
.foldLeft(true)({ case (r, (c1, c2)) => r & c1 == c2 }) & s1.length == s2.length
}
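
A note on the constTimeEq helper that is merely rewrapped above: it folds over every character pair with the non-short-circuiting & operator, so the comparison takes the same time no matter where the two strings first differ. A minimal standalone sketch of the same idea, outside the diff and only for illustration:

    def constTimeEq(s1: String, s2: String): Boolean =
      // & instead of && avoids an early exit, keeping the running time
      // independent of the position of the first mismatch
      s1.zip(s2).foldLeft(true) { case (r, (c1, c2)) => r & (c1 == c2) } &
        (s1.length == s2.length)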

View File

@ -58,7 +58,12 @@ object OCollective {
def updateFailed: PassChangeResult = UpdateFailed
}
case class RegisterData(collName: Ident, login: Ident, password: Password, invite: Option[Ident])
case class RegisterData(
collName: Ident,
login: Ident,
password: Password,
invite: Option[Ident]
)
sealed trait RegisterResult {
def toEither: Either[Throwable, Unit]
@ -117,7 +122,8 @@ object OCollective {
.traverse(_ => RUser.updatePassword(accountId, PasswordCrypt.crypt(newPass)))
res = check match {
case Some(true) =>
if (n.getOrElse(0) > 0) PassChangeResult.success else PassChangeResult.updateFailed
if (n.getOrElse(0) > 0) PassChangeResult.success
else PassChangeResult.updateFailed
case Some(false) =>
PassChangeResult.passwordMismatch
case None =>

View File

@ -8,7 +8,14 @@ import doobie._
import doobie.implicits._
import docspell.store.{AddResult, Store}
import docspell.store.queries.{QAttachment, QItem}
import OItem.{AttachmentArchiveData, AttachmentData, AttachmentSourceData, ItemData, ListItem, Query}
import OItem.{
AttachmentArchiveData,
AttachmentData,
AttachmentSourceData,
ItemData,
ListItem,
Query
}
import bitpeace.{FileMeta, RangeDef}
import docspell.common.{Direction, Ident, ItemState, MetaProposalList, Timestamp}
import docspell.store.records._
@ -21,9 +28,15 @@ trait OItem[F[_]] {
def findAttachment(id: Ident, collective: Ident): F[Option[AttachmentData[F]]]
def findAttachmentSource(id: Ident, collective: Ident): F[Option[AttachmentSourceData[F]]]
def findAttachmentSource(
id: Ident,
collective: Ident
): F[Option[AttachmentSourceData[F]]]
def findAttachmentArchive(id: Ident, collective: Ident): F[Option[AttachmentArchiveData[F]]]
def findAttachmentArchive(
id: Ident,
collective: Ident
): F[Option[AttachmentArchiveData[F]]]
def setTags(item: Ident, tagIds: List[Ident], collective: Ident): F[AddResult]
@ -45,7 +58,11 @@ trait OItem[F[_]] {
def setItemDate(item: Ident, date: Option[Timestamp], collective: Ident): F[AddResult]
def setItemDueDate(item: Ident, date: Option[Timestamp], collective: Ident): F[AddResult]
def setItemDueDate(
item: Ident,
date: Option[Timestamp],
collective: Ident
): F[AddResult]
def getProposals(item: Ident, collective: Ident): F[MetaProposalList]
@ -104,7 +121,9 @@ object OItem {
Resource.pure[F, OItem[F]](new OItem[F] {
def findItem(id: Ident, collective: Ident): F[Option[ItemData]] =
store.transact(QItem.findItem(id)).map(opt => opt.flatMap(_.filterCollective(collective)))
store
.transact(QItem.findItem(id))
.map(opt => opt.flatMap(_.filterCollective(collective)))
def findItems(q: Query, maxResults: Int): F[Vector[ListItem]] =
store.transact(QItem.findItems(q).take(maxResults.toLong)).compile.toVector
@ -126,7 +145,10 @@ object OItem {
(None: Option[AttachmentData[F]]).pure[F]
})
def findAttachmentSource(id: Ident, collective: Ident): F[Option[AttachmentSourceData[F]]] =
def findAttachmentSource(
id: Ident,
collective: Ident
): F[Option[AttachmentSourceData[F]]] =
store
.transact(RAttachmentSource.findByIdAndCollective(id, collective))
.flatMap({
@ -143,7 +165,10 @@ object OItem {
(None: Option[AttachmentSourceData[F]]).pure[F]
})
def findAttachmentArchive(id: Ident, collective: Ident): F[Option[AttachmentArchiveData[F]]] =
def findAttachmentArchive(
id: Ident,
collective: Ident
): F[Option[AttachmentArchiveData[F]]] =
store
.transact(RAttachmentArchive.findByIdAndCollective(id, collective))
.flatMap({
@ -183,38 +208,63 @@ object OItem {
store.transact(db).attempt.map(AddResult.fromUpdate)
}
def setDirection(item: Ident, direction: Direction, collective: Ident): F[AddResult] =
def setDirection(
item: Ident,
direction: Direction,
collective: Ident
): F[AddResult] =
store
.transact(RItem.updateDirection(item, collective, direction))
.attempt
.map(AddResult.fromUpdate)
def setCorrOrg(item: Ident, org: Option[Ident], collective: Ident): F[AddResult] =
store.transact(RItem.updateCorrOrg(item, collective, org)).attempt.map(AddResult.fromUpdate)
store
.transact(RItem.updateCorrOrg(item, collective, org))
.attempt
.map(AddResult.fromUpdate)
def setCorrPerson(item: Ident, person: Option[Ident], collective: Ident): F[AddResult] =
def setCorrPerson(
item: Ident,
person: Option[Ident],
collective: Ident
): F[AddResult] =
store
.transact(RItem.updateCorrPerson(item, collective, person))
.attempt
.map(AddResult.fromUpdate)
def setConcPerson(item: Ident, person: Option[Ident], collective: Ident): F[AddResult] =
def setConcPerson(
item: Ident,
person: Option[Ident],
collective: Ident
): F[AddResult] =
store
.transact(RItem.updateConcPerson(item, collective, person))
.attempt
.map(AddResult.fromUpdate)
def setConcEquip(item: Ident, equip: Option[Ident], collective: Ident): F[AddResult] =
def setConcEquip(
item: Ident,
equip: Option[Ident],
collective: Ident
): F[AddResult] =
store
.transact(RItem.updateConcEquip(item, collective, equip))
.attempt
.map(AddResult.fromUpdate)
def setNotes(item: Ident, notes: Option[String], collective: Ident): F[AddResult] =
store.transact(RItem.updateNotes(item, collective, notes)).attempt.map(AddResult.fromUpdate)
store
.transact(RItem.updateNotes(item, collective, notes))
.attempt
.map(AddResult.fromUpdate)
def setName(item: Ident, name: String, collective: Ident): F[AddResult] =
store.transact(RItem.updateName(item, collective, name)).attempt.map(AddResult.fromUpdate)
store
.transact(RItem.updateName(item, collective, name))
.attempt
.map(AddResult.fromUpdate)
def setState(item: Ident, state: ItemState, collective: Ident): F[AddResult] =
store
@ -222,10 +272,21 @@ object OItem {
.attempt
.map(AddResult.fromUpdate)
def setItemDate(item: Ident, date: Option[Timestamp], collective: Ident): F[AddResult] =
store.transact(RItem.updateDate(item, collective, date)).attempt.map(AddResult.fromUpdate)
def setItemDate(
item: Ident,
date: Option[Timestamp],
collective: Ident
): F[AddResult] =
store
.transact(RItem.updateDate(item, collective, date))
.attempt
.map(AddResult.fromUpdate)
def setItemDueDate(item: Ident, date: Option[Timestamp], collective: Ident): F[AddResult] =
def setItemDueDate(
item: Ident,
date: Option[Timestamp],
collective: Ident
): F[AddResult] =
store
.transact(RItem.updateDueDate(item, collective, date))
.attempt

View File

@ -52,7 +52,9 @@ object OJob {
def mustCancel(job: Option[RJob]): Option[(RJob, Ident)] =
for {
worker <- job.flatMap(_.worker)
job <- job.filter(j => j.state == JobState.Scheduled || j.state == JobState.Running)
job <- job.filter(j =>
j.state == JobState.Scheduled || j.state == JobState.Running
)
} yield (job, worker)
def canDelete(j: RJob): Boolean =
@ -68,8 +70,11 @@ object OJob {
}
def tryCancel(job: RJob, worker: Ident): F[JobCancelResult] =
joex.cancelJob(job.id, worker)
.map(flag => if (flag) JobCancelResult.CancelRequested else JobCancelResult.JobNotFound)
joex
.cancelJob(job.id, worker)
.map(flag =>
if (flag) JobCancelResult.CancelRequested else JobCancelResult.JobNotFound
)
for {
tryDel <- store.transact(tryDelete)

View File

@ -34,7 +34,10 @@ object OJoex {
} yield cancel.isDefined
})
def create[F[_]: ConcurrentEffect](ec: ExecutionContext, store: Store[F]): Resource[F, OJoex[F]] =
def create[F[_]: ConcurrentEffect](
ec: ExecutionContext,
store: Store[F]
): Resource[F, OJoex[F]] =
JoexClient.resource(ec).flatMap(client => apply(client, store))
}

View File

@ -149,8 +149,9 @@ object OMail {
)
} yield {
val addAttach = m.attach.filter(ras).map { a =>
Attach[F](Stream.emit(a._2).through(store.bitpeace.fetchData2(RangeDef.all)))
.withFilename(a._1.name)
Attach[F](
Stream.emit(a._2).through(store.bitpeace.fetchData2(RangeDef.all))
).withFilename(a._1.name)
.withLength(a._2.length)
.withMimeType(_root_.emil.MimeType.parse(a._2.mimetype.asString).toOption)
}
@ -187,7 +188,10 @@ object OMail {
store.transact(save.value).attempt.map {
case Right(Some(id)) => Right(id)
case Right(None) =>
Left(SendResult.StoreFailure(new Exception(s"Could not find user to save mail.")))
Left(
SendResult
.StoreFailure(new Exception(s"Could not find user to save mail."))
)
case Left(ex) => Left(SendResult.StoreFailure(ex))
}
}

View File

@ -17,7 +17,10 @@ trait OOrganization[F[_]] {
def updateOrg(s: OrgAndContacts): F[AddResult]
def findAllPerson(account: AccountId, query: Option[String]): F[Vector[PersonAndContacts]]
def findAllPerson(
account: AccountId,
query: Option[String]
): F[Vector[PersonAndContacts]]
def findAllPersonRefs(account: AccountId, nameQuery: Option[String]): F[Vector[IdRef]]
@ -39,14 +42,20 @@ object OOrganization {
def apply[F[_]: Effect](store: Store[F]): Resource[F, OOrganization[F]] =
Resource.pure[F, OOrganization[F]](new OOrganization[F] {
def findAllOrg(account: AccountId, query: Option[String]): F[Vector[OrgAndContacts]] =
def findAllOrg(
account: AccountId,
query: Option[String]
): F[Vector[OrgAndContacts]] =
store
.transact(QOrganization.findOrgAndContact(account.collective, query, _.name))
.map({ case (org, cont) => OrgAndContacts(org, cont) })
.compile
.toVector
def findAllOrgRefs(account: AccountId, nameQuery: Option[String]): F[Vector[IdRef]] =
def findAllOrgRefs(
account: AccountId,
nameQuery: Option[String]
): F[Vector[IdRef]] =
store.transact(ROrganization.findAllRef(account.collective, nameQuery, _.name))
def addOrg(s: OrgAndContacts): F[AddResult] =
@ -55,14 +64,20 @@ object OOrganization {
def updateOrg(s: OrgAndContacts): F[AddResult] =
QOrganization.updateOrg(s.org, s.contacts, s.org.cid)(store)
def findAllPerson(account: AccountId, query: Option[String]): F[Vector[PersonAndContacts]] =
def findAllPerson(
account: AccountId,
query: Option[String]
): F[Vector[PersonAndContacts]] =
store
.transact(QOrganization.findPersonAndContact(account.collective, query, _.name))
.map({ case (person, cont) => PersonAndContacts(person, cont) })
.compile
.toVector
def findAllPersonRefs(account: AccountId, nameQuery: Option[String]): F[Vector[IdRef]] =
def findAllPersonRefs(
account: AccountId,
nameQuery: Option[String]
): F[Vector[IdRef]] =
store.transact(RPerson.findAllRef(account.collective, nameQuery, _.name))
def addPerson(s: PersonAndContacts): F[AddResult] =
@ -72,7 +87,10 @@ object OOrganization {
QOrganization.updatePerson(s.person, s.contacts, s.person.cid)(store)
def deleteOrg(orgId: Ident, collective: Ident): F[AddResult] =
store.transact(QOrganization.deleteOrg(orgId, collective)).attempt.map(AddResult.fromUpdate)
store
.transact(QOrganization.deleteOrg(orgId, collective))
.attempt
.map(AddResult.fromUpdate)
def deletePerson(personId: Ident, collective: Ident): F[AddResult] =
store

View File

@ -57,7 +57,10 @@ object OUpload {
): Resource[F, OUpload[F]] =
Resource.pure[F, OUpload[F]](new OUpload[F] {
def submit(data: OUpload.UploadData[F], account: AccountId): F[OUpload.UploadResult] =
def submit(
data: OUpload.UploadData[F],
account: AccountId
): F[OUpload.UploadResult] =
for {
files <- data.files.traverse(saveFile).map(_.flatten)
pred <- checkFileList(files)
@ -74,12 +77,16 @@ object OUpload {
job <- pred.traverse(_ => makeJobs(args, account, data.priority, data.tracker))
_ <- logger.fdebug(s"Storing jobs: $job")
res <- job.traverse(submitJobs)
_ <- store.transact(RSource.incrementCounter(data.meta.sourceAbbrev, account.collective))
_ <- store.transact(
RSource.incrementCounter(data.meta.sourceAbbrev, account.collective)
)
} yield res.fold(identity, identity)
def submit(data: OUpload.UploadData[F], sourceId: Ident): F[OUpload.UploadResult] =
for {
sOpt <- store.transact(RSource.find(sourceId)).map(_.toRight(UploadResult.NoSource))
sOpt <- store
.transact(RSource.find(sourceId))
.map(_.toRight(UploadResult.NoSource))
abbrev = sOpt.map(_.abbrev).toOption.getOrElse(data.meta.sourceAbbrev)
updata = data.copy(meta = data.meta.copy(sourceAbbrev = abbrev))
accId = sOpt.map(source => AccountId(source.cid, source.sid))
@ -106,7 +113,9 @@ object OUpload {
None
}, id => Some(ProcessItemArgs.File(file.name, id))))
private def checkFileList(files: Seq[ProcessItemArgs.File]): F[Either[UploadResult, Unit]] =
private def checkFileList(
files: Seq[ProcessItemArgs.File]
): F[Either[UploadResult, Unit]] =
Sync[F].pure(if (files.isEmpty) Left(UploadResult.NoFiles) else Right(()))
private def makeJobs(

View File

@ -28,7 +28,10 @@ object OSignup {
if (cfg.mode == Config.Mode.Invite) {
if (cfg.newInvitePassword.isEmpty || cfg.newInvitePassword != password)
NewInviteResult.passwordMismatch.pure[F]
else store.transact(RInvitation.insertNew).map(ri => NewInviteResult.success(ri.id))
else
store
.transact(RInvitation.insertNew)
.map(ri => NewInviteResult.success(ri.id))
} else {
Effect[F].pure(NewInviteResult.invitationClosed)
}

View File

@ -21,7 +21,9 @@ object AccountId {
val user = input.substring(n + 1)
Ident
.fromString(coll)
.flatMap(collId => Ident.fromString(user).map(userId => AccountId(collId, userId)))
.flatMap(collId =>
Ident.fromString(user).map(userId => AccountId(collId, userId))
)
case _ =>
invalid
}

View File

@ -18,7 +18,11 @@ object File {
def mkTempDir[F[_]: Sync](parent: Path, prefix: String): F[Path] =
mkDir(parent).map(p => Files.createTempDirectory(p, prefix))
def mkTempFile[F[_]: Sync](parent: Path, prefix: String, suffix: Option[String] = None): F[Path] =
def mkTempFile[F[_]: Sync](
parent: Path,
prefix: String,
suffix: Option[String] = None
): F[Path] =
mkDir(parent).map(p => Files.createTempFile(p, prefix, suffix.orNull))
def deleteDirectory[F[_]: Sync](dir: Path): F[Int] = Sync[F].delay {
@ -26,7 +30,10 @@ object File {
Files.walkFileTree(
dir,
new SimpleFileVisitor[Path]() {
override def visitFile(file: Path, attrs: BasicFileAttributes): FileVisitResult = {
override def visitFile(
file: Path,
attrs: BasicFileAttributes
): FileVisitResult = {
Files.deleteIfExists(file)
count.incrementAndGet()
FileVisitResult.CONTINUE
@ -59,11 +66,12 @@ object File {
def withTempDir[F[_]: Sync](parent: Path, prefix: String): Resource[F, Path] =
Resource.make(mkTempDir(parent, prefix))(p => delete(p).map(_ => ()))
def listFiles[F[_]: Sync](pred: Path => Boolean, dir: Path): F[List[Path]] = Sync[F].delay {
val javaList =
Files.list(dir).filter(p => pred(p)).collect(java.util.stream.Collectors.toList())
javaList.asScala.toList.sortBy(_.getFileName.toString)
}
def listFiles[F[_]: Sync](pred: Path => Boolean, dir: Path): F[List[Path]] =
Sync[F].delay {
val javaList =
Files.list(dir).filter(p => pred(p)).collect(java.util.stream.Collectors.toList())
javaList.asScala.toList.sortBy(_.getFileName.toString)
}
def readAll[F[_]: Sync: ContextShift](
file: Path,

View File

@ -31,7 +31,8 @@ object JobState {
/** Finished with success */
case object Success extends JobState {}
val all: Set[JobState] = Set(Waiting, Scheduled, Running, Stuck, Failed, Cancelled, Success)
val all: Set[JobState] =
Set(Waiting, Scheduled, Running, Stuck, Failed, Cancelled, Success)
val queued: Set[JobState] = Set(Waiting, Scheduled, Stuck)
val done: Set[JobState] = Set(Failed, Cancelled, Success)

View File

@ -40,7 +40,9 @@ case class LenientUri(
withQueryPlain(name, URLEncoder.encode(value, "UTF-8"))
def withQueryPlain(name: String, value: String): LenientUri =
copy(query = query.map(q => q + "&" + name + "=" + value).orElse(Option(s"$name=$value")))
copy(query =
query.map(q => q + "&" + name + "=" + value).orElse(Option(s"$name=$value"))
)
def withFragment(f: String): LenientUri =
copy(fragment = Some(f))
@ -56,7 +58,10 @@ case class LenientUri(
)
}
def readURL[F[_]: Sync: ContextShift](chunkSize: Int, blocker: Blocker): Stream[F, Byte] =
def readURL[F[_]: Sync: ContextShift](
chunkSize: Int,
blocker: Blocker
): Stream[F, Byte] =
Stream
.emit(Either.catchNonFatal(new URL(asString)))
.covary[F]
@ -135,7 +140,8 @@ object LenientUri {
case "/" => RootPath
case "" => EmptyPath
case _ =>
NonEmptyList.fromList(stripLeading(str, '/').split('/').toList.map(percentDecode)) match {
NonEmptyList
.fromList(stripLeading(str, '/').split('/').toList.map(percentDecode)) match {
case Some(nl) => NonEmptyPath(nl)
case None => sys.error(s"Invalid url: $str")
}

View File

@ -56,25 +56,32 @@ object MimeType {
def parsePrimary: Either[String, (String, String)] =
str.indexOf('/') match {
case -1 => Left(s"Invalid mediatype: $str")
case n => Right(str.take(n) -> str.drop(n + 1))
case n => Right(str.take(n) -> str.drop(n + 1))
}
def parseSub(s: String): Either[String, (String, String)] =
s.indexOf(';') match {
case -1 => Right((s, ""))
case n => Right((s.take(n), s.drop(n)))
case n => Right((s.take(n), s.drop(n)))
}
def parseParams(s: String): Map[String, String] =
s.split(';').map(_.trim).filter(_.nonEmpty).toList.flatMap(p => p.split("=", 2).toList match {
case a :: b :: Nil => Some((a, b))
case _ => None
}).toMap
s.split(';')
.map(_.trim)
.filter(_.nonEmpty)
.toList
.flatMap(p =>
p.split("=", 2).toList match {
case a :: b :: Nil => Some((a, b))
case _ => None
}
)
.toMap
for {
pt <- parsePrimary
st <- parseSub(pt._2)
pa = parseParams(st._2)
pa = parseParams(st._2)
} yield MimeType(pt._1, st._1, pa)
}
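
For readers skimming the rewrapped parser above, a worked example of what the three helpers produce, derived only from the code shown in this hunk (the enclosing method's name is not visible here):

    // parsePrimary("text/html; charset=utf-8") -> Right(("text", "html; charset=utf-8"))
    // parseSub("html; charset=utf-8")          -> Right(("html", "; charset=utf-8"))
    // parseParams("; charset=utf-8")           -> Map("charset" -> "utf-8")
    // overall result: MimeType("text", "html", Map("charset" -> "utf-8"))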

View File

@ -47,14 +47,17 @@ object SystemCommand {
for {
_ <- writeToProcess(stdin, proc, blocker)
term <- Sync[F].delay(proc.waitFor(cmd.timeout.seconds, TimeUnit.SECONDS))
_ <- if (term) logger.debug(s"Command `${cmd.cmdString}` finished: ${proc.exitValue}")
_ <- if (term)
logger.debug(s"Command `${cmd.cmdString}` finished: ${proc.exitValue}")
else
logger.warn(
s"Command `${cmd.cmdString}` did not finish in ${cmd.timeout.formatExact}!"
)
_ <- if (!term) timeoutError(proc, cmd) else Sync[F].pure(())
out <- if (term) inputStreamToString(proc.getInputStream, blocker) else Sync[F].pure("")
err <- if (term) inputStreamToString(proc.getErrorStream, blocker) else Sync[F].pure("")
_ <- if (!term) timeoutError(proc, cmd) else Sync[F].pure(())
out <- if (term) inputStreamToString(proc.getInputStream, blocker)
else Sync[F].pure("")
err <- if (term) inputStreamToString(proc.getErrorStream, blocker)
else Sync[F].pure("")
} yield Result(proc.exitValue, out, err)
}
}
@ -122,12 +125,17 @@ object SystemCommand {
proc: Process,
blocker: Blocker
): F[Unit] =
data.through(io.writeOutputStream(Sync[F].delay(proc.getOutputStream), blocker)).compile.drain
data
.through(io.writeOutputStream(Sync[F].delay(proc.getOutputStream), blocker))
.compile
.drain
private def timeoutError[F[_]: Sync](proc: Process, cmd: Config): F[Unit] =
Sync[F].delay(proc.destroyForcibly()).attempt *> {
Sync[F].raiseError(
new Exception(s"Command `${cmd.cmdString}` timed out (${cmd.timeout.formatExact})")
new Exception(
s"Command `${cmd.cmdString}` timed out (${cmd.timeout.formatExact})"
)
)
}
}

View File

@ -24,11 +24,18 @@ object ThreadFactories {
): Resource[F, ExecutionContextExecutorService] =
Resource.make(Sync[F].delay(c))(ec => Sync[F].delay(ec.shutdown))
def cached[F[_]: Sync](tf: ThreadFactory): Resource[F, ExecutionContextExecutorService] =
def cached[F[_]: Sync](
tf: ThreadFactory
): Resource[F, ExecutionContextExecutorService] =
executorResource(
ExecutionContext.fromExecutorService(Executors.newCachedThreadPool(tf))
)
def fixed[F[_]: Sync](n: Int, tf: ThreadFactory): Resource[F, ExecutionContextExecutorService] =
executorResource(ExecutionContext.fromExecutorService(Executors.newFixedThreadPool(n, tf)))
def fixed[F[_]: Sync](
n: Int,
tf: ThreadFactory
): Resource[F, ExecutionContextExecutorService] =
executorResource(
ExecutionContext.fromExecutorService(Executors.newFixedThreadPool(n, tf))
)
}

View File

@ -16,7 +16,7 @@ case class Timestamp(value: Instant) {
def minus(d: Duration): Timestamp =
Timestamp(value.minusNanos(d.nanos))
def - (d: Duration): Timestamp =
def -(d: Duration): Timestamp =
minus(d)
def minusHours(n: Long): Timestamp =
@ -35,7 +35,7 @@ case class Timestamp(value: Instant) {
def asString: String = value.toString
def < (other: Timestamp): Boolean =
def <(other: Timestamp): Boolean =
this.value.isBefore(other.value)
}

View File

@ -26,17 +26,25 @@ object Implicits {
implicit val byteVectorReader: ConfigReader[ByteVector] =
ConfigReader[String].emap(reason { str =>
if (str.startsWith("hex:")) ByteVector.fromHex(str.drop(4)).toRight("Invalid hex value.")
if (str.startsWith("hex:"))
ByteVector.fromHex(str.drop(4)).toRight("Invalid hex value.")
else if (str.startsWith("b64:"))
ByteVector.fromBase64(str.drop(4)).toRight("Invalid Base64 string.")
else ByteVector.encodeUtf8(str).left.map(ex => s"Invalid utf8 string: ${ex.getMessage}")
else
ByteVector
.encodeUtf8(str)
.left
.map(ex => s"Invalid utf8 string: ${ex.getMessage}")
})
implicit val caleventReader: ConfigReader[CalEvent] =
ConfigReader[String].emap(reason(CalEvent.parse))
def reason[A: ClassTag](f: String => Either[String, A]): String => Either[FailureReason, A] =
def reason[A: ClassTag](
f: String => Either[String, A]
): String => Either[FailureReason, A] =
in =>
f(in).left.map(str => CannotConvert(in, implicitly[ClassTag[A]].runtimeClass.toString, str))
f(in).left.map(str =>
CannotConvert(in, implicitly[ClassTag[A]].runtimeClass.toString, str)
)
}

View File

@ -33,13 +33,21 @@ object NerLabelSpanTest extends SimpleTestSuite {
)
val spans = NerLabelSpan.build(labels)
assertEquals(spans, Vector(
NerLabel("Derek Jeter", NerTag.Person, 0, 11),
NerLabel("Derek Jeter", NerTag.Person, 68, 79),
NerLabel("Syrup Production Old Sticky Pancake Company", NerTag.Organization, 162, 205),
NerLabel("Maple Lane", NerTag.Location, 210, 220),
NerLabel("Little League", NerTag.Organization, 351, 364),
NerLabel("Derek Jeter", NerTag.Person, 1121, 1132)
))
assertEquals(
spans,
Vector(
NerLabel("Derek Jeter", NerTag.Person, 0, 11),
NerLabel("Derek Jeter", NerTag.Person, 68, 79),
NerLabel(
"Syrup Production Old Sticky Pancake Company",
NerTag.Organization,
162,
205
),
NerLabel("Maple Lane", NerTag.Location, 210, 220),
NerLabel("Little League", NerTag.Organization, 351, 364),
NerLabel("Derek Jeter", NerTag.Person, 1121, 1132)
)
)
}
}

View File

@ -43,11 +43,13 @@ object ConversionResult {
case class SuccessPdf[F[_]](pdf: Stream[F, Byte]) extends ConversionResult[F] {
val pdfData = pdf
}
case class SuccessPdfTxt[F[_]](pdf: Stream[F, Byte], txt: F[String]) extends ConversionResult[F] {
case class SuccessPdfTxt[F[_]](pdf: Stream[F, Byte], txt: F[String])
extends ConversionResult[F] {
val pdfData = pdf
}
case class InputMalformed[F[_]](mimeType: MimeType, reason: String) extends ConversionResult[F] {
case class InputMalformed[F[_]](mimeType: MimeType, reason: String)
extends ConversionResult[F] {
val pdfData = Stream.empty
}
}

View File

@ -40,9 +40,18 @@ private[extern] object ExternConv {
in.through(createInput).flatMap { _ =>
SystemCommand
.execSuccess[F](sysCfg, blocker, logger, Some(dir), if (useStdin) in else Stream.empty)
.execSuccess[F](
sysCfg,
blocker,
logger,
Some(dir),
if (useStdin) in
else Stream.empty
)
.evalMap(result =>
logResult(name, result, logger).flatMap(_ => reader(out, result)).flatMap(handler.run)
logResult(name, result, logger)
.flatMap(_ => reader(out, result))
.flatMap(handler.run)
)
}
}
@ -106,7 +115,9 @@ private[extern] object ExternConv {
inFile: Path
): Pipe[F, Byte, Unit] =
in =>
Stream.eval(logger.debug(s"Storing input to file ${inFile} for running $name")).drain ++
Stream
.eval(logger.debug(s"Storing input to file ${inFile} for running $name"))
.drain ++
Stream.eval(storeFile(in, inFile, blocker))
private def logResult[F[_]: Sync](

View File

@ -19,7 +19,15 @@ object Unoconv {
val reader: (Path, SystemCommand.Result) => F[ConversionResult[F]] =
ExternConv.readResult[F](blocker, chunkSize, logger)
ExternConv.toPDF[F, A]("unoconv", cfg.command, cfg.workingDir, false, blocker, logger, reader)(
ExternConv.toPDF[F, A](
"unoconv",
cfg.command,
cfg.workingDir,
false,
blocker,
logger,
reader
)(
in,
handler
)

View File

@ -18,7 +18,11 @@ import docspell.common._
object Markdown {
def toHtml(is: InputStream, cfg: MarkdownConfig, cs: Charset): Either[Throwable, String] = {
def toHtml(
is: InputStream,
cfg: MarkdownConfig,
cs: Charset
): Either[Throwable, String] = {
val p = createParser()
val r = createRenderer()
Try {
@ -35,7 +39,11 @@ object Markdown {
wrapHtml(r.render(doc), cfg)
}
def toHtml[F[_]: Sync](data: Stream[F, Byte], cfg: MarkdownConfig, cs: Charset): F[String] =
def toHtml[F[_]: Sync](
data: Stream[F, Byte],
cfg: MarkdownConfig,
cs: Charset
): F[String] =
data.through(Binary.decode(cs)).compile.foldMonoid.map(str => toHtml(str, cfg))
private def wrapHtml(body: String, cfg: MarkdownConfig): String =

View File

@ -13,7 +13,11 @@ import docspell.files.ImageSize
trait Extraction[F[_]] {
def extractText(data: Stream[F, Byte], dataType: DataType, lang: Language): F[ExtractResult]
def extractText(
data: Stream[F, Byte],
dataType: DataType,
lang: Language
): F[ExtractResult]
}
@ -71,13 +75,17 @@ object Extraction {
doExtract
}
case None =>
logger.info(s"Cannot read image data from ${mt.asString}. Extracting anyways.") *>
logger.info(
s"Cannot read image data from ${mt.asString}. Extracting anyways."
) *>
doExtract
}
case OdfType.ContainerMatch(_) =>
logger
.info(s"File detected as ${OdfType.container}. Try to read as OpenDocument file.") *>
.info(
s"File detected as ${OdfType.container}. Try to read as OpenDocument file."
) *>
OdfExtract.get(data).map(ExtractResult.fromEither)
case mt @ MimeType("text", sub, _) if !sub.contains("html") =>

View File

@ -135,7 +135,9 @@ object Ocr {
.map(_ => targetFile)
.handleErrorWith { th =>
logger
.warn(s"Unpaper command failed: ${th.getMessage}. Using input file for text extraction.")
.warn(
s"Unpaper command failed: ${th.getMessage}. Using input file for text extraction."
)
Stream.emit(img)
}
}
@ -152,10 +154,15 @@ object Ocr {
): Stream[F, String] =
// tesseract cannot cope with absolute filenames
// so use the parent as working dir
runUnpaperFile(img, config.unpaper.command, img.getParent, blocker, logger).flatMap { uimg =>
val cmd = config.tesseract.command
.replace(Map("{{file}}" -> uimg.getFileName.toString, "{{lang}}" -> fixLanguage(lang)))
SystemCommand.execSuccess[F](cmd, blocker, logger, wd = Some(uimg.getParent)).map(_.stdout)
runUnpaperFile(img, config.unpaper.command, img.getParent, blocker, logger).flatMap {
uimg =>
val cmd = config.tesseract.command
.replace(
Map("{{file}}" -> uimg.getFileName.toString, "{{lang}}" -> fixLanguage(lang))
)
SystemCommand
.execSuccess[F](cmd, blocker, logger, wd = Some(uimg.getParent))
.map(_.stdout)
}
/** Run tesseract on the given image file and return the extracted

View File

@ -41,11 +41,16 @@ object OcrConfig {
Paths.get(System.getProperty("java.io.tmpdir")).resolve("docspell-extraction")
),
unpaper = Unpaper(
SystemCommand.Config("unpaper", Seq("{{infile}}", "{{outfile}}"), Duration.seconds(30))
SystemCommand
.Config("unpaper", Seq("{{infile}}", "{{outfile}}"), Duration.seconds(30))
),
tesseract = Tesseract(
SystemCommand
.Config("tesseract", Seq("{{file}}", "stdout", "-l", "{{lang}}"), Duration.minutes(1))
.Config(
"tesseract",
Seq("{{file}}", "stdout", "-l", "{{lang}}"),
Duration.minutes(1)
)
)
)
}

View File

@ -14,7 +14,9 @@ import fs2.Stream
object PdfboxExtract {
def get[F[_]: Sync](data: Stream[F, Byte]): F[Either[Throwable, String]] =
data.compile.to(Array).map(bytes => Using(PDDocument.load(bytes))(readText).toEither.flatten)
data.compile
.to(Array)
.map(bytes => Using(PDDocument.load(bytes))(readText).toEither.flatten)
def get(is: InputStream): Either[Throwable, String] =
Using(PDDocument.load(is))(readText).toEither.flatten

View File

@ -20,10 +20,16 @@ import docspell.files.TikaMimetype
object PoiExtract {
def get[F[_]: Sync](data: Stream[F, Byte], hint: MimeTypeHint): F[Either[Throwable, String]] =
def get[F[_]: Sync](
data: Stream[F, Byte],
hint: MimeTypeHint
): F[Either[Throwable, String]] =
TikaMimetype.detect(data, hint).flatMap(mt => get(data, mt))
def get[F[_]: Sync](data: Stream[F, Byte], mime: MimeType): F[Either[Throwable, String]] =
def get[F[_]: Sync](
data: Stream[F, Byte],
mime: MimeType
): F[Either[Throwable, String]] =
mime match {
case PoiType.doc =>
getDoc(data)

View File

@ -6,10 +6,11 @@ object PoiType {
val msoffice = MimeType.application("x-tika-msoffice")
val ooxml = MimeType.application("x-tika-ooxml")
val docx = MimeType.application("vnd.openxmlformats-officedocument.wordprocessingml.document")
val xlsx = MimeType.application("vnd.openxmlformats-officedocument.spreadsheetml.sheet")
val xls = MimeType.application("vnd.ms-excel")
val doc = MimeType.application("msword")
val docx =
MimeType.application("vnd.openxmlformats-officedocument.wordprocessingml.document")
val xlsx = MimeType.application("vnd.openxmlformats-officedocument.spreadsheetml.sheet")
val xls = MimeType.application("vnd.ms-excel")
val doc = MimeType.application("msword")
val all = Set(msoffice, ooxml, docx, xlsx, xls, doc)

View File

@ -15,7 +15,10 @@ object Playing extends IOApp {
val x = for {
odsm1 <- TikaMimetype
.detect(rtf, MimeTypeHint.filename(ExampleFiles.examples_sample_rtf.path.segments.last))
.detect(
rtf,
MimeTypeHint.filename(ExampleFiles.examples_sample_rtf.path.segments.last)
)
odsm2 <- TikaMimetype.detect(rtf, MimeTypeHint.none)
} yield (odsm1, odsm2)
println(x.unsafeRunSync())

View File

@ -12,19 +12,23 @@ object ZipTest extends SimpleTestSuite {
test("unzip") {
val zipFile = ExampleFiles.letters_zip.readURL[IO](8192, blocker)
val uncomp = zipFile.through(Zip.unzip(8192, blocker))
val uncomp = zipFile.through(Zip.unzip(8192, blocker))
uncomp.evalMap(entry => {
val x = entry.data.map(_ => 1).foldMonoid.compile.lastOrError
x.map(size => {
if (entry.name.endsWith(".pdf")) {
assertEquals(entry.name, "letter-de.pdf")
assertEquals(size, 34815)
} else {
assertEquals(entry.name, "letter-en.txt")
assertEquals(size, 1131)
uncomp
.evalMap { entry =>
val x = entry.data.map(_ => 1).foldMonoid.compile.lastOrError
x.map { size =>
if (entry.name.endsWith(".pdf")) {
assertEquals(entry.name, "letter-de.pdf")
assertEquals(size, 34815)
} else {
assertEquals(entry.name, "letter-en.txt")
assertEquals(size, 1131)
}
}
})
}).compile.drain.unsafeRunSync
}
.compile
.drain
.unsafeRunSync
}
}

View File

@ -12,8 +12,10 @@ import org.log4s._
object Main extends IOApp {
private[this] val logger = getLogger
val blockingEC = ThreadFactories.cached[IO](ThreadFactories.ofName("docspell-joex-blocking"))
val connectEC = ThreadFactories.fixed[IO](5, ThreadFactories.ofName("docspell-joex-dbconnect"))
val blockingEC =
ThreadFactories.cached[IO](ThreadFactories.ofName("docspell-joex-blocking"))
val connectEC =
ThreadFactories.fixed[IO](5, ThreadFactories.ofName("docspell-joex-dbconnect"))
def run(args: List[String]) = {
args match {
@ -52,9 +54,17 @@ object Main extends IOApp {
blocker = Blocker.liftExecutorService(bec)
} yield Pools(cec, bec, blocker)
pools.use(p =>
JoexServer.stream[IO](cfg, p.connectEC, p.clientEC, p.blocker).compile.drain.as(ExitCode.Success)
JoexServer
.stream[IO](cfg, p.connectEC, p.clientEC, p.blocker)
.compile
.drain
.as(ExitCode.Success)
)
}
case class Pools(connectEC: ExecutionContext, clientEC: ExecutionContext, blocker: Blocker)
case class Pools(
connectEC: ExecutionContext,
clientEC: ExecutionContext,
blocker: Blocker
)
}

View File

@ -6,9 +6,9 @@ import docspell.common._
import HouseKeepingConfig._
case class HouseKeepingConfig(
schedule: CalEvent,
cleanupInvites: CleanupInvites,
cleanupJobs: CleanupJobs
schedule: CalEvent,
cleanupInvites: CleanupInvites,
cleanupJobs: CleanupJobs
)
object HouseKeepingConfig {

View File

@ -16,7 +16,8 @@ object HouseKeepingTask {
val taskName: Ident = Ident.unsafe("housekeeping")
def apply[F[_]: Sync](cfg: Config): Task[F, Unit, Unit] =
Task.log[F](_.info(s"Running house-keeping task now"))
Task
.log[F](_.info(s"Running house-keeping task now"))
.flatMap(_ => CleanupInvitesTask(cfg.houseKeeping.cleanupInvites))
.flatMap(_ => CleanupJobsTask(cfg.houseKeeping.cleanupJobs))

View File

@ -25,7 +25,10 @@ object CreateItem {
Task { ctx =>
def isValidFile(fm: FileMeta) =
ctx.args.meta.validFileTypes.isEmpty ||
ctx.args.meta.validFileTypes.map(_.asString).toSet.contains(fm.mimetype.baseType)
ctx.args.meta.validFileTypes
.map(_.asString)
.toSet
.contains(fm.mimetype.baseType)
def fileMetas(itemId: Ident, now: Timestamp) =
Stream
@ -37,7 +40,9 @@ object CreateItem {
case (f, index) =>
Ident
.randomId[F]
.map(id => RAttachment(id, itemId, f.fileMetaId, index.toInt, now, f.name))
.map(id =>
RAttachment(id, itemId, f.fileMetaId, index.toInt, now, f.name)
)
})
.compile
.toVector
@ -51,7 +56,9 @@ object CreateItem {
)
for {
_ <- ctx.logger.info(s"Creating new item with ${ctx.args.files.size} attachment(s)")
_ <- ctx.logger.info(
s"Creating new item with ${ctx.args.files.size} attachment(s)"
)
time <- Duration.stopTime[F]
it <- item
n <- ctx.store.transact(RItem.insert(it))
@ -61,7 +68,13 @@ object CreateItem {
_ <- logDifferences(ctx, fm, k.sum)
dur <- time
_ <- ctx.logger.info(s"Creating item finished in ${dur.formatExact}")
} yield ItemData(it, fm, Vector.empty, Vector.empty, fm.map(a => a.id -> a.fileId).toMap)
} yield ItemData(
it,
fm,
Vector.empty,
Vector.empty,
fm.map(a => a.id -> a.fileId).toMap
)
}
def insertAttachment[F[_]: Sync](ctx: Context[F, _])(ra: RAttachment): F[Int] = {
@ -79,7 +92,8 @@ object CreateItem {
_ <- if (cand.nonEmpty) ctx.logger.warn("Found existing item with these files.")
else ().pure[F]
ht <- cand.drop(1).traverse(ri => QItem.delete(ctx.store)(ri.id, ri.cid))
_ <- if (ht.sum > 0) ctx.logger.warn(s"Removed ${ht.sum} items with same attachments")
_ <- if (ht.sum > 0)
ctx.logger.warn(s"Removed ${ht.sum} items with same attachments")
else ().pure[F]
rms <- OptionT(
cand.headOption.traverse(ri =>
@ -92,7 +106,9 @@ object CreateItem {
origMap = orig
.map(originFileTuple)
.toMap
} yield cand.headOption.map(ri => ItemData(ri, rms, Vector.empty, Vector.empty, origMap))
} yield cand.headOption.map(ri =>
ItemData(ri, rms, Vector.empty, Vector.empty, origMap)
)
}
private def logDifferences[F[_]: Sync](
@ -114,6 +130,8 @@ object CreateItem {
}
//TODO if no source is present, it must be saved!
private def originFileTuple(t: (RAttachment, Option[RAttachmentSource])): (Ident, Ident) =
private def originFileTuple(
t: (RAttachment, Option[RAttachmentSource])
): (Ident, Ident) =
t._2.map(s => s.id -> s.fileId).getOrElse(t._1.id -> t._1.fileId)
}

View File

@ -24,7 +24,10 @@ case class ItemData(
copy(metas = next)
}
def changeMeta(attachId: Ident, f: RAttachmentMeta => RAttachmentMeta): RAttachmentMeta =
def changeMeta(
attachId: Ident,
f: RAttachmentMeta => RAttachmentMeta
): RAttachmentMeta =
f(findOrCreate(attachId))
def findOrCreate(attachId: Ident): RAttachmentMeta =

View File

@ -10,15 +10,21 @@ import docspell.store.records.{RItem, RJob}
object ItemHandler {
def onCancel[F[_]: Sync: ContextShift]: Task[F, ProcessItemArgs, Unit] =
logWarn("Now cancelling. Deleting potentially created data.").flatMap(_ => deleteByFileIds)
logWarn("Now cancelling. Deleting potentially created data.").flatMap(_ =>
deleteByFileIds
)
def apply[F[_]: ConcurrentEffect: ContextShift](cfg: Config): Task[F, ProcessItemArgs, Unit] =
def apply[F[_]: ConcurrentEffect: ContextShift](
cfg: Config
): Task[F, ProcessItemArgs, Unit] =
CreateItem[F]
.flatMap(itemStateTask(ItemState.Processing))
.flatMap(safeProcess[F](cfg))
.map(_ => ())
def itemStateTask[F[_]: Sync, A](state: ItemState)(data: ItemData): Task[F, A, ItemData] =
def itemStateTask[F[_]: Sync, A](
state: ItemState
)(data: ItemData): Task[F, A, ItemData] =
Task(ctx => ctx.store.transact(RItem.updateState(data.item.id, state)).map(_ => data))
def isLastRetry[F[_]: Sync, A](ctx: Context[F, A]): F[Boolean] =
@ -36,8 +42,9 @@ object ItemHandler {
case Right(d) =>
Task.pure(d)
case Left(ex) =>
logWarn[F]("Processing failed on last retry. Creating item but without proposals.")
.flatMap(_ => itemStateTask(ItemState.Created)(data))
logWarn[F](
"Processing failed on last retry. Creating item but without proposals."
).flatMap(_ => itemStateTask(ItemState.Created)(data))
.andThen(_ => Sync[F].raiseError(ex))
})
case false =>

View File

@ -34,7 +34,7 @@ object TextAnalysis {
for {
list0 <- stanfordNer[F](lang, rm)
list1 <- contactNer[F](rm)
list = list0 ++ list1
list = list0 ++ list1
spans = NerLabelSpan.build(list.toSeq)
dates <- dateNer[F](rm, lang)
} yield (rm.copy(nerlabels = (spans ++ list ++ dates.toNerLabel).toList), dates)
@ -48,11 +48,14 @@ object TextAnalysis {
rm.content.map(Contact.annotate).getOrElse(Vector.empty)
}
def dateNer[F[_]: Sync](rm: RAttachmentMeta, lang: Language): F[AttachmentDates] = Sync[F].delay {
AttachmentDates(
rm,
rm.content.map(txt => DateFind.findDates(txt, lang).toVector).getOrElse(Vector.empty)
)
}
def dateNer[F[_]: Sync](rm: RAttachmentMeta, lang: Language): F[AttachmentDates] =
Sync[F].delay {
AttachmentDates(
rm,
rm.content
.map(txt => DateFind.findDates(txt, lang).toVector)
.getOrElse(Vector.empty)
)
}
}

View File

@ -19,11 +19,13 @@ object TextExtraction {
for {
_ <- ctx.logger.info("Starting text extraction")
start <- Duration.stopTime[F]
txt <- item.attachments.traverse(extractTextIfEmpty(ctx, cfg, ctx.args.meta.language, item))
_ <- ctx.logger.debug("Storing extracted texts")
_ <- txt.toList.traverse(rm => ctx.store.transact(RAttachmentMeta.upsert(rm)))
dur <- start
_ <- ctx.logger.info(s"Text extraction finished in ${dur.formatExact}")
txt <- item.attachments.traverse(
extractTextIfEmpty(ctx, cfg, ctx.args.meta.language, item)
)
_ <- ctx.logger.debug("Storing extracted texts")
_ <- txt.toList.traverse(rm => ctx.store.transact(RAttachmentMeta.upsert(rm)))
dur <- start
_ <- ctx.logger.info(s"Text extraction finished in ${dur.formatExact}")
} yield item.copy(metas = txt)
}
@ -53,7 +55,10 @@ object TextExtraction {
_ <- ctx.logger.debug(s"Extracting text for attachment ${stripAttachmentName(ra)}")
dst <- Duration.stopTime[F]
txt <- extractTextFallback(ctx, cfg, ra, lang)(filesToExtract(item, ra))
meta = item.changeMeta(ra.id, rm => rm.setContentIfEmpty(txt.map(_.trim).filter(_.nonEmpty)))
meta = item.changeMeta(
ra.id,
rm => rm.setContentIfEmpty(txt.map(_.trim).filter(_.nonEmpty))
)
est <- dst
_ <- ctx.logger.debug(
s"Extracting text for attachment ${stripAttachmentName(ra)} finished in ${est.formatExact}"
@ -76,7 +81,9 @@ object TextExtraction {
.getOrElse(Mimetype.`application/octet-stream`)
findMime
.flatMap(mt => extr.extractText(data, DataType(MimeType(mt.primary, mt.sub, mt.params)), lang))
.flatMap(mt =>
extr.extractText(data, DataType(MimeType(mt.primary, mt.sub, mt.params)), lang)
)
}
private def extractTextFallback[F[_]: Sync: ContextShift](

View File

@ -49,7 +49,9 @@ object JoexRoutes {
case POST -> Root / "job" / Ident(id) / "cancel" =>
for {
flag <- app.scheduler.requestCancel(id)
resp <- Ok(BasicResult(flag, if (flag) "Cancel request submitted" else "Job not found"))
resp <- Ok(
BasicResult(flag, if (flag) "Cancel request submitted" else "Job not found")
)
} yield resp
}
}

View File

@ -16,11 +16,19 @@ import io.circe.Decoder
* convenience constructor that uses circe to decode json into some
* type A.
*/
case class JobTask[F[_]](name: Ident, task: Task[F, String, Unit], onCancel: Task[F, String, Unit])
case class JobTask[F[_]](
name: Ident,
task: Task[F, String, Unit],
onCancel: Task[F, String, Unit]
)
object JobTask {
def json[F[_]: Sync, A](name: Ident, task: Task[F, A, Unit], onCancel: Task[F, A, Unit])(
def json[F[_]: Sync, A](
name: Ident,
task: Task[F, A, Unit],
onCancel: Task[F, A, Unit]
)(
implicit D: Decoder[A]
): JobTask[F] = {
val convert: String => F[A] =

View File

@ -20,7 +20,12 @@ case class LogEvent(
object LogEvent {
def create[F[_]: Sync](jobId: Ident, jobInfo: String, level: LogLevel, msg: String): F[LogEvent] =
def create[F[_]: Sync](
jobId: Ident,
jobInfo: String,
level: LogLevel,
msg: String
): F[LogEvent] =
Timestamp.current[F].map(now => LogEvent(jobId, jobInfo, now, level, msg))
}

View File

@ -42,7 +42,16 @@ object PeriodicScheduler {
for {
waiter <- Resource.liftF(SignallingRef(true))
state <- Resource.liftF(SignallingRef(PeriodicSchedulerImpl.emptyState[F]))
psch = new PeriodicSchedulerImpl[F](cfg, sch, queue, store, client, waiter, state, timer)
psch = new PeriodicSchedulerImpl[F](
cfg,
sch,
queue,
store,
client,
waiter,
state,
timer
)
_ <- Resource.liftF(psch.init)
} yield psch

View File

@ -3,6 +3,6 @@ package docspell.joex.scheduler
import docspell.common._
case class PeriodicSchedulerConfig(
name: Ident,
wakeupPeriod: Duration
name: Ident,
wakeupPeriod: Duration
)

View File

@ -7,7 +7,11 @@ import fs2.concurrent.Queue
object QueueLogger {
def create[F[_]: Sync](jobId: Ident, jobInfo: String, q: Queue[F, LogEvent]): Logger[F] =
def create[F[_]: Sync](
jobId: Ident,
jobInfo: String,
q: Queue[F, LogEvent]
): Logger[F] =
new Logger[F] {
def trace(msg: => String): F[Unit] =
LogEvent.create[F](jobId, jobInfo, LogLevel.Debug, msg).flatMap(q.enqueue1)

View File

@ -38,7 +38,9 @@ case class SchedulerBuilder[F[_]: ConcurrentEffect: ContextShift](
copy(queue = Resource.pure[F, JobQueue[F]](queue))
def serve: Resource[F, Scheduler[F]] =
resource.evalMap(sch => ConcurrentEffect[F].start(sch.start.compile.drain).map(_ => sch))
resource.evalMap(sch =>
ConcurrentEffect[F].start(sch.start.compile.drain).map(_ => sch)
)
def resource: Resource[F, Scheduler[F]] = {
val scheduler = for {
@ -46,7 +48,17 @@ case class SchedulerBuilder[F[_]: ConcurrentEffect: ContextShift](
waiter <- Resource.liftF(SignallingRef(true))
state <- Resource.liftF(SignallingRef(SchedulerImpl.emptyState[F]))
perms <- Resource.liftF(Semaphore(config.poolSize.toLong))
} yield new SchedulerImpl[F](config, blocker, jq, tasks, store, logSink, state, waiter, perms)
} yield new SchedulerImpl[F](
config,
blocker,
jq,
tasks,
store,
logSink,
state,
waiter,
perms
)
scheduler.evalTap(_.init).map(s => s: Scheduler[F])
}

View File

@ -50,7 +50,8 @@ final class SchedulerImpl[F[_]: ConcurrentEffect: ContextShift](
def requestCancel(jobId: Ident): F[Boolean] =
state.get.flatMap(_.cancelRequest(jobId) match {
case Some(ct) => ct.map(_ => true)
case None => logger.fwarn(s"Job ${jobId.id} not found, cannot cancel.").map(_ => false)
case None =>
logger.fwarn(s"Job ${jobId.id} not found, cannot cancel.").map(_ => false)
})
def notifyChange: F[Unit] =
@ -67,12 +68,15 @@ final class SchedulerImpl[F[_]: ConcurrentEffect: ContextShift](
.eval(runShutdown)
.evalMap(_ => logger.finfo("Scheduler is shutting down now."))
.flatMap(_ =>
Stream.eval(state.get) ++ Stream.suspend(state.discrete.takeWhile(_.getRunning.nonEmpty))
Stream.eval(state.get) ++ Stream
.suspend(state.discrete.takeWhile(_.getRunning.nonEmpty))
)
.flatMap { state =>
if (state.getRunning.isEmpty) Stream.eval(logger.finfo("No jobs running."))
else
Stream.eval(logger.finfo(s"Waiting for ${state.getRunning.size} jobs to finish.")) ++
Stream.eval(
logger.finfo(s"Waiting for ${state.getRunning.size} jobs to finish.")
) ++
Stream.emit(state)
}
@ -86,11 +90,14 @@ final class SchedulerImpl[F[_]: ConcurrentEffect: ContextShift](
def mainLoop: Stream[F, Nothing] = {
val body: F[Boolean] =
for {
_ <- permits.available.flatMap(a => logger.fdebug(s"Try to acquire permit ($a free)"))
_ <- permits.available.flatMap(a =>
logger.fdebug(s"Try to acquire permit ($a free)")
)
_ <- permits.acquire
_ <- logger.fdebug("New permit acquired")
down <- state.get.map(_.shutdownRequest)
rjob <- if (down) logger.finfo("") *> permits.release *> (None: Option[RJob]).pure[F]
rjob <- if (down)
logger.finfo("") *> permits.release *> (None: Option[RJob]).pure[F]
else
queue.nextJob(
group => state.modify(_.nextPrio(group, config.countingScheme)),
@ -151,7 +158,11 @@ final class SchedulerImpl[F[_]: ConcurrentEffect: ContextShift](
} yield ()
def onStart(job: RJob): F[Unit] =
QJob.setRunning(job.id, config.name, store) //also increments retries if current state=stuck
QJob.setRunning(
job.id,
config.name,
store
) //also increments retries if current state=stuck
def wrapTask(
job: RJob,
@ -159,7 +170,9 @@ final class SchedulerImpl[F[_]: ConcurrentEffect: ContextShift](
ctx: Context[F, String]
): Task[F, String, Unit] =
task
.mapF(fa => onStart(job) *> logger.fdebug("Starting task now") *> blocker.blockOn(fa))
.mapF(fa =>
onStart(job) *> logger.fdebug("Starting task now") *> blocker.blockOn(fa)
)
.mapF(_.attempt.flatMap({
case Right(()) =>
logger.info(s"Job execution successful: ${job.info}")
@ -196,7 +209,12 @@ final class SchedulerImpl[F[_]: ConcurrentEffect: ContextShift](
onFinish(job, JobState.Stuck)
})
def forkRun(job: RJob, code: F[Unit], onCancel: F[Unit], ctx: Context[F, String]): F[F[Unit]] = {
def forkRun(
job: RJob,
code: F[Unit],
onCancel: F[Unit],
ctx: Context[F, String]
): F[F[Unit]] = {
val bfa = blocker.blockOn(code)
logger.fdebug(s"Forking job ${job.info}") *>
ConcurrentEffect[F]
@ -236,10 +254,16 @@ object SchedulerImpl {
}
def addRunning(job: RJob, token: CancelToken[F]): (State[F], Unit) =
(State(counters, cancelled, cancelTokens.updated(job.id, token), shutdownRequest), ())
(
State(counters, cancelled, cancelTokens.updated(job.id, token), shutdownRequest),
()
)
def removeRunning(job: RJob): (State[F], Unit) =
(copy(cancelled = cancelled - job.id, cancelTokens = cancelTokens.removed(job.id)), ())
(
copy(cancelled = cancelled - job.id, cancelTokens = cancelTokens.removed(job.id)),
()
)
def markCancelled(job: RJob): (State[F], Unit) =
(copy(cancelled = cancelled + job.id), ())

View File

@ -25,11 +25,13 @@ trait Task[F[_], A, B] {
def mapF[C](f: F[B] => F[C]): Task[F, A, C] =
Task(Task.toKleisli(this).mapF(f))
def attempt(implicit F: ApplicativeError[F, Throwable]): Task[F, A, Either[Throwable, B]] =
def attempt(
implicit F: ApplicativeError[F, Throwable]
): Task[F, A, Either[Throwable, B]] =
mapF(_.attempt)
def contramap[C](f: C => F[A])(implicit F: FlatMap[F]): Task[F, C, B] = { ctxc: Context[F, C] =>
f(ctxc.args).flatMap(a => run(ctxc.map(_ => a)))
def contramap[C](f: C => F[A])(implicit F: FlatMap[F]): Task[F, C, B] = {
ctxc: Context[F, C] => f(ctxc.args).flatMap(a => run(ctxc.map(_ => a)))
}
}

View File

@ -18,9 +18,15 @@ case class Config(
object Config {
val postgres =
JdbcConfig(LenientUri.unsafe("jdbc:postgresql://localhost:5432/docspelldev"), "dev", "dev")
JdbcConfig(
LenientUri.unsafe("jdbc:postgresql://localhost:5432/docspelldev"),
"dev",
"dev"
)
val h2 = JdbcConfig(
LenientUri.unsafe("jdbc:h2:./target/docspelldev.db;MODE=PostgreSQL;DATABASE_TO_LOWER=TRUE"),
LenientUri.unsafe(
"jdbc:h2:./target/docspelldev.db;MODE=PostgreSQL;DATABASE_TO_LOWER=TRUE"
),
"sa",
""
)

View File

@ -12,8 +12,10 @@ import org.log4s._
object Main extends IOApp {
private[this] val logger = getLogger
val blockingEC = ThreadFactories.cached[IO](ThreadFactories.ofName("docspell-restserver-blocking"))
val connectEC = ThreadFactories.fixed[IO](5, ThreadFactories.ofName("docspell-dbconnect"))
val blockingEC =
ThreadFactories.cached[IO](ThreadFactories.ofName("docspell-restserver-blocking"))
val connectEC =
ThreadFactories.fixed[IO](5, ThreadFactories.ofName("docspell-dbconnect"))
def run(args: List[String]) = {
args match {
@ -53,9 +55,17 @@ object Main extends IOApp {
logger.info(s"\n${banner.render("***>")}")
pools.use(p =>
RestServer.stream[IO](cfg, p.connectEC, p.clientEC, p.blocker).compile.drain.as(ExitCode.Success)
RestServer
.stream[IO](cfg, p.connectEC, p.clientEC, p.blocker)
.compile
.drain
.as(ExitCode.Success)
)
}
case class Pools(connectEC: ExecutionContext, clientEC: ExecutionContext, blocker: Blocker)
case class Pools(
connectEC: ExecutionContext,
clientEC: ExecutionContext,
blocker: Blocker
)
}

View File

@ -91,7 +91,8 @@ trait Conversions {
)
def mkAttachment(item: OItem.ItemData)(ra: RAttachment, m: FileMeta): Attachment = {
val converted = item.sources.find(_._1.id == ra.id).exists(_._2.checksum != m.checksum)
val converted =
item.sources.find(_._1.id == ra.id).exists(_._2.checksum != m.checksum)
Attachment(ra.id, ra.name, m.length, MimeType.unsafe(m.mimetype.asString), converted)
}
@ -107,7 +108,8 @@ trait Conversions {
OItem.Query(
coll,
m.name,
if (m.inbox) Seq(ItemState.Created) else Seq(ItemState.Created, ItemState.Confirmed),
if (m.inbox) Seq(ItemState.Created)
else Seq(ItemState.Created, ItemState.Confirmed),
m.direction,
m.corrPerson,
m.corrOrg,
@ -127,7 +129,8 @@ trait Conversions {
def mkGroup(g: (String, Vector[OItem.ListItem])): ItemLightGroup =
ItemLightGroup(g._1, g._2.map(mkItemLight).toList)
val gs = groups.map(mkGroup _).toList.sortWith((g1, g2) => g1.name.compareTo(g2.name) >= 0)
val gs =
groups.map(mkGroup _).toList.sortWith((g1, g2) => g1.name.compareTo(g2.name) >= 0)
ItemLightList(gs)
}
@ -203,13 +206,16 @@ trait Conversions {
val meta: F[(Boolean, UploadMeta)] = mp.parts
.find(_.name.exists(_.equalsIgnoreCase("meta")))
.map(p => parseMeta(p.body))
.map(fm => fm.map(m => (m.multiple, UploadMeta(m.direction, "webapp", validFileTypes))))
.map(fm =>
fm.map(m => (m.multiple, UploadMeta(m.direction, "webapp", validFileTypes)))
)
.getOrElse((true, UploadMeta(None, "webapp", validFileTypes)).pure[F])
val files = mp.parts
.filter(p => p.name.forall(s => !s.equalsIgnoreCase("meta")))
.map(p =>
OUpload.File(p.filename, p.headers.get(`Content-Type`).map(fromContentType), p.body)
OUpload
.File(p.filename, p.headers.get(`Content-Type`).map(fromContentType), p.body)
)
for {
metaData <- meta
@ -252,7 +258,10 @@ trait Conversions {
} yield OOrganization.OrgAndContacts(org, cont)
}
def changeOrg[F[_]: Sync](v: Organization, cid: Ident): F[OOrganization.OrgAndContacts] = {
def changeOrg[F[_]: Sync](
v: Organization,
cid: Ident
): F[OOrganization.OrgAndContacts] = {
def contacts(oid: Ident) =
v.contacts.traverse(c => newContact(c, oid.some, None))
for {
@ -306,7 +315,10 @@ trait Conversions {
} yield OOrganization.PersonAndContacts(org, cont)
}
def changePerson[F[_]: Sync](v: Person, cid: Ident): F[OOrganization.PersonAndContacts] = {
def changePerson[F[_]: Sync](
v: Person,
cid: Ident
): F[OOrganization.PersonAndContacts] = {
def contacts(pid: Ident) =
v.contacts.traverse(c => newContact(c, None, pid.some))
for {
@ -330,7 +342,11 @@ trait Conversions {
def mkContact(rc: RContact): Contact =
Contact(rc.contactId, rc.value, rc.kind)
def newContact[F[_]: Sync](c: Contact, oid: Option[Ident], pid: Option[Ident]): F[RContact] =
def newContact[F[_]: Sync](
c: Contact,
oid: Option[Ident],
pid: Option[Ident]
): F[RContact] =
timeId.map {
case (id, now) =>
RContact(id, c.value, c.kind, pid, oid, now)
@ -395,7 +411,16 @@ trait Conversions {
})
def changeSource[F[_]: Sync](s: Source, coll: Ident): RSource =
RSource(s.id, coll, s.abbrev, s.description, s.counter, s.enabled, s.priority, s.created)
RSource(
s.id,
coll,
s.abbrev,
s.description,
s.counter,
s.enabled,
s.priority,
s.created
)
// equipment
def mkEquipment(re: REquipment): Equipment =
@ -422,7 +447,8 @@ trait Conversions {
case JobCancelResult.JobNotFound => BasicResult(false, "Job not found")
case JobCancelResult.CancelRequested =>
BasicResult(true, "Cancel was requested at the job executor")
case JobCancelResult.Removed => BasicResult(true, "The job has been removed from the queue.")
case JobCancelResult.Removed =>
BasicResult(true, "The job has been removed from the queue.")
}
def basicResult(ar: AddResult, successMsg: String): BasicResult = ar match {
@ -438,8 +464,9 @@ trait Conversions {
}
def basicResult(cr: PassChangeResult): BasicResult = cr match {
case PassChangeResult.Success => BasicResult(true, "Password changed.")
case PassChangeResult.UpdateFailed => BasicResult(false, "The database update failed.")
case PassChangeResult.Success => BasicResult(true, "Password changed.")
case PassChangeResult.UpdateFailed =>
BasicResult(false, "The database update failed.")
case PassChangeResult.PasswordMismatch =>
BasicResult(false, "The current password is incorrect.")
case PassChangeResult.UserNotFound => BasicResult(false, "User not found.")
@ -448,7 +475,11 @@ trait Conversions {
// MIME Type
def fromContentType(header: `Content-Type`): MimeType =
MimeType(header.mediaType.mainType, header.mediaType.subType, header.mediaType.extensions)
MimeType(
header.mediaType.mainType,
header.mediaType.subType,
header.mediaType.extensions
)
}
object Conversions extends Conversions {

View File

@ -23,7 +23,9 @@ object AttachmentRoutes {
val dsl = new Http4sDsl[F] {}
import dsl._
def withResponseHeaders(resp: F[Response[F]])(data: OItem.BinaryData[F]): F[Response[F]] = {
def withResponseHeaders(
resp: F[Response[F]]
)(data: OItem.BinaryData[F]): F[Response[F]] = {
val mt = MediaType.unsafeParse(data.meta.mimetype.asString)
val ctype = `Content-Type`(mt)
val cntLen: Header = `Content-Length`.unsafeFromLong(data.meta.length)
@ -104,7 +106,6 @@ object AttachmentRoutes {
.getOrElse(NotFound(BasicResult(false, "Not found")))
} yield resp
case GET -> Root / Ident(id) / "view" =>
// this route exists to provide a stable url
// it redirects currently to viewerjs

View File

@ -44,7 +44,8 @@ object CheckFileRoutes {
private def convert(v: Vector[RItem]): CheckFileResult =
CheckFileResult(
v.nonEmpty,
v.map(r => BasicItem(r.id, r.name, r.direction, r.state, r.created, r.itemDate)).toList
v.map(r => BasicItem(r.id, r.name, r.direction, r.state, r.created, r.itemDate))
.toList
)
}

View File

@ -28,8 +28,9 @@ object CollectiveRoutes {
case req @ POST -> Root / "settings" =>
for {
settings <- req.as[CollectiveSettings]
res <- backend.collective.updateLanguage(user.account.collective, settings.language)
resp <- Ok(Conversions.basicResult(res, "Language updated."))
res <- backend.collective
.updateLanguage(user.account.collective, settings.language)
resp <- Ok(Conversions.basicResult(res, "Language updated."))
} yield resp
case GET -> Root / "settings" =>
@ -39,7 +40,8 @@ object CollectiveRoutes {
resp <- sett.toResponse()
} yield resp
case GET -> Root / "contacts" :? QueryParam.QueryOpt(q) +& QueryParam.ContactKindOpt(kind) =>
case GET -> Root / "contacts" :? QueryParam.QueryOpt(q) +& QueryParam
.ContactKindOpt(kind) =>
for {
res <- backend.collective
.getContacts(user.account.collective, q.map(_.q), kind)


@ -36,7 +36,9 @@ object ItemRoutes {
for {
item <- backend.item.findItem(id, user.account.collective)
result = item.map(Conversions.mkItemDetail)
resp <- result.map(r => Ok(r)).getOrElse(NotFound(BasicResult(false, "Not found.")))
resp <- result
.map(r => Ok(r))
.getOrElse(NotFound(BasicResult(false, "Not found.")))
} yield resp
case POST -> Root / Ident(id) / "confirm" =>
@ -103,7 +105,11 @@ object ItemRoutes {
case req @ POST -> Root / Ident(id) / "name" =>
for {
text <- req.as[OptionalText]
res <- backend.item.setName(id, text.text.notEmpty.getOrElse(""), user.account.collective)
res <- backend.item.setName(
id,
text.text.notEmpty.getOrElse(""),
user.account.collective
)
resp <- Ok(Conversions.basicResult(res, "Name updated"))
} yield resp


@ -40,7 +40,8 @@ object MailSendRoutes {
for {
rec <- s.recipients.traverse(MailAddress.parse)
fileIds <- s.attachmentIds.traverse(Ident.fromString)
sel = if (s.addAllAttachments) AttachSelection.All else AttachSelection.Selected(fileIds)
sel = if (s.addAllAttachments) AttachSelection.All
else AttachSelection.Selected(fileIds)
} yield ItemMail(item, s.subject, rec, s.body, sel)
def convertOut(res: SendResult): BasicResult =
@ -50,7 +51,10 @@ object MailSendRoutes {
case SendResult.SendFailure(ex) =>
BasicResult(false, s"Mail sending failed: ${ex.getMessage}")
case SendResult.StoreFailure(ex) =>
BasicResult(false, s"Mail was sent, but could not be store to database: ${ex.getMessage}")
BasicResult(
false,
s"Mail was sent, but could not be store to database: ${ex.getMessage}"
)
case SendResult.NotFound =>
BasicResult(false, s"There was no mail-connection or item found.")
}


@ -43,7 +43,9 @@ object MailSettingsRoutes {
(for {
in <- OptionT.liftF(req.as[EmailSettings])
ru = makeSettings(in)
up <- OptionT.liftF(ru.traverse(r => backend.mail.createSettings(user.account, r)))
up <- OptionT.liftF(
ru.traverse(r => backend.mail.createSettings(user.account, r))
)
resp <- OptionT.liftF(
Ok(
up.fold(
@ -58,7 +60,9 @@ object MailSettingsRoutes {
(for {
in <- OptionT.liftF(req.as[EmailSettings])
ru = makeSettings(in)
up <- OptionT.liftF(ru.traverse(r => backend.mail.updateSettings(user.account, name, r)))
up <- OptionT.liftF(
ru.traverse(r => backend.mail.updateSettings(user.account, name, r))
)
resp <- OptionT.liftF(
Ok(
up.fold(


@ -19,7 +19,11 @@ import org.log4s._
object UploadRoutes {
private[this] val logger = getLogger
def secured[F[_]: Effect](backend: BackendApp[F], cfg: Config, user: AuthToken): HttpRoutes[F] = {
def secured[F[_]: Effect](
backend: BackendApp[F],
cfg: Config,
user: AuthToken
): HttpRoutes[F] = {
val dsl = new Http4sDsl[F] with ResponseGenerator[F] {}
import dsl._
@ -51,9 +55,14 @@ object UploadRoutes {
case req @ POST -> Root / "item" / Ident(id) =>
for {
multipart <- req.as[Multipart[F]]
updata <- readMultipart(multipart, logger, Priority.Low, cfg.backend.files.validMimeTypes)
result <- backend.upload.submit(updata, id)
res <- Ok(basicResult(result))
updata <- readMultipart(
multipart,
logger,
Priority.Low,
cfg.backend.files.validMimeTypes
)
result <- backend.upload.submit(updata, id)
res <- Ok(basicResult(result))
} yield res
case GET -> Root / "checkfile" / Ident(id) / checksum =>


@ -29,8 +29,10 @@ object TemplateRoutes {
def apply[F[_]: Effect](blocker: Blocker, cfg: Config)(
implicit C: ContextShift[F]
): InnerRoutes[F] = {
val indexTemplate = memo(loadResource("/index.html").flatMap(loadTemplate(_, blocker)))
val docTemplate = memo(loadResource("/doc.html").flatMap(loadTemplate(_, blocker)))
val indexTemplate = memo(
loadResource("/index.html").flatMap(loadTemplate(_, blocker))
)
val docTemplate = memo(loadResource("/doc.html").flatMap(loadTemplate(_, blocker)))
val dsl = new Http4sDsl[F] {}
import dsl._
@ -62,7 +64,9 @@ object TemplateRoutes {
r.pure[F]
}
def loadUrl[F[_]: Sync](url: URL, blocker: Blocker)(implicit C: ContextShift[F]): F[String] =
def loadUrl[F[_]: Sync](url: URL, blocker: Blocker)(
implicit C: ContextShift[F]
): F[String] =
Stream
.bracket(Sync[F].delay(url.openStream))(in => Sync[F].delay(in.close()))
.flatMap(in => io.readInputStream(in.pure[F], 64 * 1024, blocker, false))


@ -9,7 +9,9 @@ import org.http4s.server.staticcontent.WebjarService.{WebjarAsset, Config => Web
object WebjarRoutes {
def appRoutes[F[_]: Effect](blocker: Blocker)(implicit C: ContextShift[F]): HttpRoutes[F] =
def appRoutes[F[_]: Effect](
blocker: Blocker
)(implicit C: ContextShift[F]): HttpRoutes[F] =
webjarService(
WebjarConfig(
filter = assetFilter,


@ -15,7 +15,10 @@ sealed trait AddResult {
object AddResult {
def fromUpdate(e: Either[Throwable, Int]): AddResult =
e.fold(Failure, n => if (n > 0) Success else Failure(new Exception("No rows updated")))
e.fold(
Failure,
n => if (n > 0) Success else Failure(new Exception("No rows updated"))
)
case object Success extends AddResult {
def toEither = Right(())


@ -88,7 +88,6 @@ trait DoobieMeta extends EmilDoobieMeta {
implicit val metaLanguage: Meta[Language] =
Meta[String].imap(Language.unsafe)(_.iso3)
implicit val metaCalEvent: Meta[CalEvent] =
Meta[String].timap(CalEvent.unsafe)(_.asString)
}


@ -27,7 +27,9 @@ trait DoobieSyntax {
and(f0 :: fs.toList)
def or(fs: Seq[Fragment]): Fragment =
Fragment.const(" (") ++ fs.reduce(_ ++ Fragment.const(" OR ") ++ _) ++ Fragment.const(") ")
Fragment.const(" (") ++ fs.reduce(_ ++ Fragment.const(" OR ") ++ _) ++ Fragment.const(
") "
)
def or(f0: Fragment, fs: Fragment*): Fragment =
or(f0 :: fs.toList)
@ -42,7 +44,9 @@ trait DoobieSyntax {
fr"ORDER BY" ++ commas(c0 :: cs.toList)
def updateRow(table: Fragment, where: Fragment, setter: Fragment): Fragment =
Fragment.const("UPDATE ") ++ table ++ Fragment.const(" SET ") ++ setter ++ this.where(where)
Fragment.const("UPDATE ") ++ table ++ Fragment.const(" SET ") ++ setter ++ this.where(
where
)
def insertRow(table: Fragment, cols: List[Column], vals: Fragment): Fragment =
Fragment.const("INSERT INTO ") ++ table ++ Fragment.const(" (") ++
@ -66,15 +70,17 @@ trait DoobieSyntax {
Fragment.const(") FROM ") ++ table ++ this.where(where)
def selectCount(col: Column, table: Fragment, where: Fragment): Fragment =
Fragment.const("SELECT COUNT(") ++ col.f ++ Fragment.const(") FROM ") ++ table ++ this.where(
where
)
Fragment.const("SELECT COUNT(") ++ col.f ++ Fragment.const(") FROM ") ++ table ++ this
.where(
where
)
def deleteFrom(table: Fragment, where: Fragment): Fragment =
fr"DELETE FROM" ++ table ++ this.where(where)
def withCTE(ps: (String, Fragment)*): Fragment = {
val subsel: Seq[Fragment] = ps.map(p => Fragment.const(p._1) ++ fr"AS (" ++ p._2 ++ fr")")
val subsel: Seq[Fragment] =
ps.map(p => Fragment.const(p._1) ++ fr"AS (" ++ p._2 ++ fr")")
fr"WITH" ++ commas(subsel)
}


@ -9,9 +9,15 @@ import docspell.store.{AddResult, JdbcConfig, Store}
import doobie._
import doobie.implicits._
final class StoreImpl[F[_]: Effect](jdbc: JdbcConfig, xa: Transactor[F]) extends Store[F] {
final class StoreImpl[F[_]: Effect](jdbc: JdbcConfig, xa: Transactor[F])
extends Store[F] {
val bitpeaceCfg =
BitpeaceConfig("filemeta", "filechunk", TikaMimetypeDetect, Ident.randomId[F].map(_.id))
BitpeaceConfig(
"filemeta",
"filechunk",
TikaMimetypeDetect,
Ident.randomId[F].map(_.id)
)
def migrate: F[Int] =
FlywayMigrate.run[F](jdbc)


@ -15,7 +15,9 @@ object FlywayMigrate {
val name = if (dbtype == "h2") "postgresql" else dbtype
List(s"classpath:db/migration/${name}")
case None =>
logger.warn(s"Cannot read database name from jdbc url: ${jdbc.url}. Go with PostgreSQL")
logger.warn(
s"Cannot read database name from jdbc url: ${jdbc.url}. Go with PostgreSQL"
)
List("classpath:db/postgresql")
}


@ -52,7 +52,7 @@ object QAttachment {
.foldMonoid
} yield n + f
def deleteArchive[F[_]: Sync](store: Store[F])(attachId: Ident): F[Int] = {
def deleteArchive[F[_]: Sync](store: Store[F])(attachId: Ident): F[Int] =
(for {
aa <- OptionT(store.transact(RAttachmentArchive.findById(attachId)))
n <- OptionT.liftF(store.transact(RAttachmentArchive.deleteAll(aa.fileId)))
@ -64,7 +64,6 @@ object QAttachment {
.drain
)
} yield n).getOrElse(0)
}
def deleteItemAttachments[F[_]: Sync](
store: Store[F]


@ -10,7 +10,12 @@ import docspell.common.ContactKind
object QCollective {
case class InsightData(incoming: Int, outgoing: Int, bytes: Long, tags: Map[String, Int])
case class InsightData(
incoming: Int,
outgoing: Int,
bytes: Long,
tags: Map[String, Int]
)
def getInsights(coll: Ident): ConnectionIO[InsightData] = {
val IC = RItem.Columns
@ -49,7 +54,9 @@ object QCollective {
fr"count(" ++ RC.itemId.prefix("r").f ++ fr")"
) ++
fr"FROM" ++ RTagItem.table ++ fr"r" ++
fr"INNER JOIN" ++ RTag.table ++ fr"t ON" ++ RC.tagId.prefix("r").is(TC.tid.prefix("t")) ++
fr"INNER JOIN" ++ RTag.table ++ fr"t ON" ++ RC.tagId
.prefix("r")
.is(TC.tid.prefix("t")) ++
fr"WHERE" ++ TC.cid.prefix("t").is(coll) ++
fr"GROUP BY" ++ TC.name.prefix("t").f


@ -24,8 +24,8 @@ object QItem {
inReplyTo: Option[IdRef],
tags: Vector[RTag],
attachments: Vector[(RAttachment, FileMeta)],
sources: Vector[(RAttachmentSource, FileMeta)],
archives: Vector[(RAttachmentArchive, FileMeta)]
sources: Vector[(RAttachmentSource, FileMeta)],
archives: Vector[(RAttachmentArchive, FileMeta)]
) {
def filterCollective(coll: Ident): Option[ItemData] =
@ -75,8 +75,8 @@ object QItem {
)
]
.option
val attachs = RAttachment.findByItemWithMeta(id)
val sources = RAttachmentSource.findByItemWithMeta(id)
val attachs = RAttachment.findByItemWithMeta(id)
val sources = RAttachmentSource.findByItemWithMeta(id)
val archives = RAttachmentArchive.findByItemWithMeta(id)
val tags = RTag.findByItem(id)
@ -87,7 +87,9 @@ object QItem {
srcs <- sources
arch <- archives
ts <- tags
} yield data.map(d => ItemData(d._1, d._2, d._3, d._4, d._5, d._6, ts, att, srcs, arch))
} yield data.map(d =>
ItemData(d._1, d._2, d._3, d._4, d._5, d._6, ts, att, srcs, arch)
)
}
case class ListItem(


@ -17,13 +17,19 @@ object QJob {
def takeNextJob[F[_]: Effect](
store: Store[F]
)(priority: Ident => F[Priority], worker: Ident, retryPause: Duration): F[Option[RJob]] =
)(
priority: Ident => F[Priority],
worker: Ident,
retryPause: Duration
): F[Option[RJob]] =
Stream
.range(0, 10)
.evalMap(n => takeNextJob1(store)(priority, worker, retryPause, n))
.evalTap { x =>
if (x.isLeft)
logger.fdebug[F]("Cannot mark job, probably due to concurrent updates. Will retry.")
logger.fdebug[F](
"Cannot mark job, probably due to concurrent updates. Will retry."
)
else ().pure[F]
}
.find(_.isRight)
@ -54,7 +60,9 @@ object QJob {
} yield if (n == 1) Right(job) else Left(()))
for {
_ <- logger.ftrace[F](s"About to take next job (worker ${worker.id}), try $currentTry")
_ <- logger.ftrace[F](
s"About to take next job (worker ${worker.id}), try $currentTry"
)
now <- Timestamp.current[F]
group <- store.transact(selectNextGroup(worker, now, retryPause))
_ <- logger.ftrace[F](s"Choose group ${group.map(_.id)}")
@ -66,7 +74,8 @@ object QJob {
_ <- logger.ftrace[F](s"Found job: ${job.map(_.info)}")
res <- job.traverse(j => markJob(j))
} yield res.map(_.map(_.some)).getOrElse {
if (group.isDefined) Left(()) // if a group was found, but no job, someone else was faster
if (group.isDefined)
Left(()) // if a group was found, but no job, someone else was faster
else Right(None)
}
}
@ -103,7 +112,9 @@ object QJob {
union
.query[Ident]
.to[List]
.map(_.headOption) // either one or two results, but may be empty if RJob table is empty
.map(
_.headOption
) // either one or two results, but may be empty if RJob table is empty
}
def selectNextJob(
@ -119,15 +130,19 @@ object QJob {
val waiting: JobState = JobState.Waiting
val stuck: JobState = JobState.Stuck
val stuckTrigger = coalesce(JC.startedmillis.f, sql"${now.toMillis}") ++ fr"+" ++ power2(
JC.retries
) ++ fr"* ${initialPause.millis}"
val stuckTrigger =
coalesce(JC.startedmillis.f, sql"${now.toMillis}") ++ fr"+" ++ power2(
JC.retries
) ++ fr"* ${initialPause.millis}"
val sql = selectSimple(
JC.all,
RJob.table,
and(
JC.group.is(group),
or(JC.state.is(waiting), and(JC.state.is(stuck), stuckTrigger ++ fr"< ${now.toMillis}"))
or(
JC.state.is(waiting),
and(JC.state.is(stuck), stuckTrigger ++ fr"< ${now.toMillis}")
)
)
) ++
orderBy(JC.state.asc, psort, JC.submitted.asc) ++
@ -189,7 +204,9 @@ object QJob {
def findAll[F[_]: Effect](ids: Seq[Ident], store: Store[F]): F[Vector[RJob]] =
store.transact(RJob.findFromIds(ids))
def queueStateSnapshot(collective: Ident): Stream[ConnectionIO, (RJob, Vector[RJobLog])] = {
def queueStateSnapshot(
collective: Ident
): Stream[ConnectionIO, (RJob, Vector[RJobLog])] = {
val JC = RJob.Columns
val waiting: Set[JobState] = Set(JobState.Waiting, JobState.Stuck, JobState.Scheduled)
val running: Set[JobState] = Set(JobState.Running)


@ -15,7 +15,11 @@ trait JobQueue[F[_]] {
def insertAll(jobs: Seq[RJob]): F[Unit]
def nextJob(prio: Ident => F[Priority], worker: Ident, retryPause: Duration): F[Option[RJob]]
def nextJob(
prio: Ident => F[Priority],
worker: Ident,
retryPause: Duration
): F[Option[RJob]]
}
object JobQueue {
@ -29,14 +33,16 @@ object JobQueue {
worker: Ident,
retryPause: Duration
): F[Option[RJob]] =
logger.ftrace("Select next job") *> QJob.takeNextJob(store)(prio, worker, retryPause)
logger
.ftrace("Select next job") *> QJob.takeNextJob(store)(prio, worker, retryPause)
def insert(job: RJob): F[Unit] =
store
.transact(RJob.insert(job))
.flatMap { n =>
if (n != 1)
Effect[F].raiseError(new Exception(s"Inserting job failed. Update count: $n"))
Effect[F]
.raiseError(new Exception(s"Inserting job failed. Update count: $n"))
else ().pure[F]
}


@ -10,8 +10,7 @@ object Marked {
final case object NotMarkable extends Marked[Nothing]
def found[A](v: A): Marked[A] = Found(v)
def notFound[A]: Marked[A] = NotFound
def notFound[A]: Marked[A] = NotFound
def notMarkable[A]: Marked[A] = NotMarkable
}


@ -38,7 +38,11 @@ object RAttachment {
fr"${v.id},${v.itemId},${v.fileId.id},${v.position},${v.created},${v.name}"
).update.run
def updateFileIdAndName(attachId: Ident, fId: Ident, fname: Option[String]): ConnectionIO[Int] =
def updateFileIdAndName(
attachId: Ident,
fId: Ident,
fname: Option[String]
): ConnectionIO[Int] =
updateRow(table, id.is(attachId), commas(fileId.setTo(fId), name.setTo(fname))).update.run
def updatePosition(attachId: Ident, pos: Int): ConnectionIO[Int] =
@ -55,13 +59,17 @@ object RAttachment {
val aFileMeta = fileId.prefix("a")
val mId = RFileMeta.Columns.id.prefix("m")
val from = table ++ fr"a INNER JOIN" ++ RFileMeta.table ++ fr"m ON" ++ aFileMeta.is(mId)
val from =
table ++ fr"a INNER JOIN" ++ RFileMeta.table ++ fr"m ON" ++ aFileMeta.is(mId)
val cond = aId.is(attachId)
selectSimple(cols, from, cond).query[FileMeta].option
}
def findByIdAndCollective(attachId: Ident, collective: Ident): ConnectionIO[Option[RAttachment]] =
def findByIdAndCollective(
attachId: Ident,
collective: Ident
): ConnectionIO[Option[RAttachment]] =
selectSimple(
all.map(_.prefix("a")),
table ++ fr"a," ++ RItem.table ++ fr"i",
@ -75,7 +83,10 @@ object RAttachment {
def findByItem(id: Ident): ConnectionIO[Vector[RAttachment]] =
selectSimple(all, table, itemId.is(id)).query[RAttachment].to[Vector]
def findByItemAndCollective(id: Ident, coll: Ident): ConnectionIO[Vector[RAttachment]] = {
def findByItemAndCollective(
id: Ident,
coll: Ident
): ConnectionIO[Vector[RAttachment]] = {
val q = selectSimple(all.map(_.prefix("a")), table ++ fr"a", Fragment.empty) ++
fr"INNER JOIN" ++ RItem.table ++ fr"i ON" ++ RItem.Columns.id
.prefix("i")
@ -97,8 +108,9 @@ object RAttachment {
val iId = RItem.Columns.id.prefix("i")
val iColl = RItem.Columns.cid.prefix("i")
val from = table ++ fr"a INNER JOIN" ++ RFileMeta.table ++ fr"m ON" ++ afileMeta.is(mId) ++
fr"INNER JOIN" ++ RItem.table ++ fr"i ON" ++ aItem.is(iId)
val from =
table ++ fr"a INNER JOIN" ++ RFileMeta.table ++ fr"m ON" ++ afileMeta.is(mId) ++
fr"INNER JOIN" ++ RItem.table ++ fr"i ON" ++ aItem.is(iId)
val cond = Seq(aItem.is(id), iColl.is(coll))
selectSimple(cols, from, and(cond)).query[(RAttachment, FileMeta)].to[Vector]


@ -23,11 +23,11 @@ object RAttachmentArchive {
val table = fr"attachment_archive"
object Columns {
val id = Column("id")
val fileId = Column("file_id")
val name = Column("filename")
val id = Column("id")
val fileId = Column("file_id")
val name = Column("filename")
val messageId = Column("message_id")
val created = Column("created")
val created = Column("created")
val all = List(id, fileId, name, messageId, created)
}


@ -20,7 +20,8 @@ case class RAttachmentMeta(
}
object RAttachmentMeta {
def empty(attachId: Ident) = RAttachmentMeta(attachId, None, Nil, MetaProposalList.empty)
def empty(attachId: Ident) =
RAttachmentMeta(attachId, None, Nil, MetaProposalList.empty)
val table = fr"attachmentmeta"


@ -63,7 +63,9 @@ object RAttachmentSource {
selectSimple(all.map(_.prefix("a")), from, where).query[RAttachmentSource].option
}
def findByItemWithMeta(id: Ident): ConnectionIO[Vector[(RAttachmentSource, FileMeta)]] = {
def findByItemWithMeta(
id: Ident
): ConnectionIO[Vector[(RAttachmentSource, FileMeta)]] = {
import bitpeace.sql._
val aId = Columns.id.prefix("a")


@ -7,7 +7,12 @@ import doobie._
import doobie.implicits._
import fs2.Stream
case class RCollective(id: Ident, state: CollectiveState, language: Language, created: Timestamp)
case class RCollective(
id: Ident,
state: CollectiveState,
language: Language,
created: Timestamp
)
object RCollective {


@ -40,7 +40,9 @@ object RInvitation {
deleteFrom(table, id.is(invite)).update.run
def useInvite(invite: Ident, minCreated: Timestamp): ConnectionIO[Boolean] = {
val get = selectCount(id, table, and(id.is(invite), created.isGt(minCreated))).query[Int].unique
val get = selectCount(id, table, and(id.is(invite), created.isGt(minCreated)))
.query[Int]
.unique
for {
inv <- get
_ <- delete(invite)


@ -113,7 +113,11 @@ object RItem {
def updateState(itemId: Ident, itemState: ItemState): ConnectionIO[Int] =
for {
t <- currentTime
n <- updateRow(table, id.is(itemId), commas(state.setTo(itemState), updated.setTo(t))).update.run
n <- updateRow(
table,
id.is(itemId),
commas(state.setTo(itemState), updated.setTo(t))
).update.run
} yield n
def updateStateForCollective(
@ -160,7 +164,11 @@ object RItem {
).update.run
} yield n
def updateCorrPerson(itemId: Ident, coll: Ident, person: Option[Ident]): ConnectionIO[Int] =
def updateCorrPerson(
itemId: Ident,
coll: Ident,
person: Option[Ident]
): ConnectionIO[Int] =
for {
t <- currentTime
n <- updateRow(
@ -180,7 +188,11 @@ object RItem {
).update.run
} yield n
def updateConcPerson(itemId: Ident, coll: Ident, person: Option[Ident]): ConnectionIO[Int] =
def updateConcPerson(
itemId: Ident,
coll: Ident,
person: Option[Ident]
): ConnectionIO[Int] =
for {
t <- currentTime
n <- updateRow(
@ -200,7 +212,11 @@ object RItem {
).update.run
} yield n
def updateConcEquip(itemId: Ident, coll: Ident, equip: Option[Ident]): ConnectionIO[Int] =
def updateConcEquip(
itemId: Ident,
coll: Ident,
equip: Option[Ident]
): ConnectionIO[Int] =
for {
t <- currentTime
n <- updateRow(
@ -250,7 +266,11 @@ object RItem {
).update.run
} yield n
def updateDueDate(itemId: Ident, coll: Ident, date: Option[Timestamp]): ConnectionIO[Int] =
def updateDueDate(
itemId: Ident,
coll: Ident,
date: Option[Timestamp]
): ConnectionIO[Int] =
for {
t <- currentTime
n <- updateRow(


@ -6,7 +6,13 @@ import docspell.common._
import docspell.store.impl.Column
import docspell.store.impl.Implicits._
case class RJobLog(id: Ident, jobId: Ident, level: LogLevel, created: Timestamp, message: String) {}
case class RJobLog(
id: Ident,
jobId: Ident,
level: LogLevel,
created: Timestamp,
message: String
) {}
object RJobLog {
@ -26,7 +32,9 @@ object RJobLog {
insertRow(table, all, fr"${v.id},${v.jobId},${v.level},${v.created},${v.message}").update.run
def findLogs(id: Ident): ConnectionIO[Vector[RJobLog]] =
(selectSimple(all, table, jobId.is(id)) ++ orderBy(created.asc)).query[RJobLog].to[Vector]
(selectSimple(all, table, jobId.is(id)) ++ orderBy(created.asc))
.query[RJobLog]
.to[Vector]
def deleteAll(job: Ident): ConnectionIO[Int] =
deleteFrom(table, jobId.is(job)).update.run


@ -68,7 +68,10 @@ object ROrganization {
}
def existsByName(coll: Ident, oname: String): ConnectionIO[Boolean] =
selectCount(oid, table, and(cid.is(coll), name.is(oname))).query[Int].unique.map(_ > 0)
selectCount(oid, table, and(cid.is(coll), name.is(oname)))
.query[Int]
.unique
.map(_ > 0)
def findById(id: Ident): ConnectionIO[Option[ROrganization]] = {
val sql = selectSimple(all, table, cid.is(id))
@ -93,7 +96,9 @@ object ROrganization {
val CC = RContact.Columns
val q = fr"SELECT DISTINCT" ++ commas(oid.prefix("o").f, name.prefix("o").f) ++
fr"FROM" ++ table ++ fr"o" ++
fr"INNER JOIN" ++ RContact.table ++ fr"c ON" ++ CC.orgId.prefix("c").is(oid.prefix("o")) ++
fr"INNER JOIN" ++ RContact.table ++ fr"c ON" ++ CC.orgId
.prefix("c")
.is(oid.prefix("o")) ++
fr"WHERE" ++ and(
cid.prefix("o").is(coll),
CC.kind.prefix("c").is(contactKind),
@ -103,7 +108,10 @@ object ROrganization {
q.query[IdRef].to[Vector]
}
def findAll(coll: Ident, order: Columns.type => Column): Stream[ConnectionIO, ROrganization] = {
def findAll(
coll: Ident,
order: Columns.type => Column
): Stream[ConnectionIO, ROrganization] = {
val sql = selectSimple(all, table, cid.is(coll)) ++ orderBy(order(Columns).f)
sql.query[ROrganization].stream
}


@ -71,7 +71,10 @@ object RPerson {
}
def existsByName(coll: Ident, pname: String): ConnectionIO[Boolean] =
selectCount(pid, table, and(cid.is(coll), name.is(pname))).query[Int].unique.map(_ > 0)
selectCount(pid, table, and(cid.is(coll), name.is(pname)))
.query[Int]
.unique
.map(_ > 0)
def findById(id: Ident): ConnectionIO[Option[RPerson]] = {
val sql = selectSimple(all, table, cid.is(id))
@ -103,7 +106,9 @@ object RPerson {
val CC = RContact.Columns
val q = fr"SELECT DISTINCT" ++ commas(pid.prefix("p").f, name.prefix("p").f) ++
fr"FROM" ++ table ++ fr"p" ++
fr"INNER JOIN" ++ RContact.table ++ fr"c ON" ++ CC.personId.prefix("c").is(pid.prefix("p")) ++
fr"INNER JOIN" ++ RContact.table ++ fr"c ON" ++ CC.personId
.prefix("c")
.is(pid.prefix("p")) ++
fr"WHERE" ++ and(
cid.prefix("p").is(coll),
CC.kind.prefix("c").is(contactKind),
@ -114,7 +119,10 @@ object RPerson {
q.query[IdRef].to[Vector]
}
def findAll(coll: Ident, order: Columns.type => Column): Stream[ConnectionIO, RPerson] = {
def findAll(
coll: Ident,
order: Columns.type => Column
): Stream[ConnectionIO, RPerson] = {
val sql = selectSimple(all, table, cid.is(coll)) ++ orderBy(order(Columns).f)
sql.query[RPerson].stream
}


@ -37,7 +37,17 @@ object RSentMail {
for {
id <- Ident.randomId[F]
now <- Timestamp.current[F]
} yield RSentMail(id, uid, messageId, sender, connName, subject, recipients, body, now)
} yield RSentMail(
id,
uid,
messageId,
sender,
connName,
subject,
recipients,
body,
now
)
def forItem(
itemId: Ident,


@ -86,7 +86,10 @@ object RSource {
def findCollective(sourceId: Ident): ConnectionIO[Option[Ident]] =
selectSimple(List(cid), table, sid.is(sourceId)).query[Ident].option
def findAll(coll: Ident, order: Columns.type => Column): ConnectionIO[Vector[RSource]] = {
def findAll(
coll: Ident,
order: Columns.type => Column
): ConnectionIO[Vector[RSource]] = {
val sql = selectSimple(all, table, cid.is(coll)) ++ orderBy(order(Columns).f)
sql.query[RSource].to[Vector]
}


@ -29,7 +29,11 @@ object RTag {
def insert(v: RTag): ConnectionIO[Int] = {
val sql =
insertRow(table, all, fr"${v.tagId},${v.collective},${v.name},${v.category},${v.created}")
insertRow(
table,
all,
fr"${v.tagId},${v.collective},${v.name},${v.category},${v.created}"
)
sql.update.run
}


@ -32,7 +32,8 @@ object RUser {
val lastLogin = Column("lastlogin")
val created = Column("created")
val all = List(uid, login, cid, password, state, email, loginCount, lastLogin, created)
val all =
List(uid, login, cid, password, state, email, loginCount, lastLogin, created)
}
import Columns._


@ -178,7 +178,8 @@ object RUserEmail {
case None => Seq.empty
})
(selectSimple(all.map(_.prefix("m")), from, and(cond)) ++ orderBy(mName.f)).query[RUserEmail]
(selectSimple(all.map(_.prefix("m")), from, and(cond)) ++ orderBy(mName.f))
.query[RUserEmail]
}
def findByAccount(
@ -198,9 +199,10 @@ object RUserEmail {
deleteFrom(
table,
fr"uid in (" ++ selectSimple(Seq(uId), RUser.table, and(cond)) ++ fr") AND" ++ name.is(
connName
)
fr"uid in (" ++ selectSimple(Seq(uId), RUser.table, and(cond)) ++ fr") AND" ++ name
.is(
connName
)
).update.run
}
@ -208,5 +210,8 @@ object RUserEmail {
getByName(accId, name).map(_.isDefined)
def exists(userId: Ident, connName: Ident): ConnectionIO[Boolean] =
selectCount(id, table, and(uid.is(userId), name.is(connName))).query[Int].unique.map(_ > 0)
selectCount(id, table, and(uid.is(userId), name.is(connName)))
.query[Int]
.unique
.map(_ > 0)
}