scalafmtAll

Eike Kettner 2020-03-26 18:26:00 +01:00
parent 09ea724c13
commit 9656ba62f4
91 changed files with 871 additions and 295 deletions
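Every hunk below is mechanical re-formatting produced by running scalafmt over the whole code base (the sbt task `scalafmtAll`): over-long lines are wrapped, parameter lists are split one argument per line, and long call chains are broken before each `.`. The project's own `.scalafmt.conf` is not part of this diff; purely as a hedged illustration, a minimal configuration that drives this kind of wrapping could look like the following (the version and column values are assumptions, not taken from this commit):

# hypothetical .scalafmt.conf sketch -- the project's real settings are not shown in this diff
version = "2.4.2"   # scalafmt release assumed for illustration
maxColumn = 80      # lines longer than this limit are wrapped, as seen in the hunks below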


@@ -1,5 +1,8 @@
 <img align="right" src="./artwork/logo-only.svg" height="150px" style="padding-left: 20px"/>
+[![Build Status](https://travis-ci.org/eikek/docspell.svg?branch=master)](https://travis-ci.org/eikek/docspell)
+[![Scala Steward badge](https://img.shields.io/badge/Scala_Steward-helping-blue.svg?style=flat&logo=data:image/png;base64,iVBORw0KGgoAAAANSUhEUgAAAA4AAAAQCAMAAAARSr4IAAAAVFBMVEUAAACHjojlOy5NWlrKzcYRKjGFjIbp293YycuLa3pYY2LSqql4f3pCUFTgSjNodYRmcXUsPD/NTTbjRS+2jomhgnzNc223cGvZS0HaSD0XLjbaSjElhIr+AAAAAXRSTlMAQObYZgAAAHlJREFUCNdNyosOwyAIhWHAQS1Vt7a77/3fcxxdmv0xwmckutAR1nkm4ggbyEcg/wWmlGLDAA3oL50xi6fk5ffZ3E2E3QfZDCcCN2YtbEWZt+Drc6u6rlqv7Uk0LdKqqr5rk2UCRXOk0vmQKGfc94nOJyQjouF9H/wCc9gECEYfONoAAAAASUVORK5CYII=)](https://scala-steward.org)
 # Docspell


@@ -31,7 +31,8 @@ object Domain {
       case Nil => Left(s"Not a domain: $str")
       case segs
           if segs.forall(label =>
-            label.trim.nonEmpty && label.forall(c => c.isLetter || c.isDigit || c == '-')
+            label.trim.nonEmpty && label
+              .forall(c => c.isLetter || c.isDigit || c == '-')
           ) =>
         Right(Domain(NonEmptyList.fromListUnsafe(segs), tld))
       case _ => Left(s"Not a domain: $str")


@@ -21,7 +21,12 @@ object DateFind {
         .map(sd =>
           NerDateLabel(
             sd.toLocalDate,
-            NerLabel(text.substring(q.head.begin, q(2).end), NerTag.Date, q.head.begin, q(1).end)
+            NerLabel(
+              text.substring(q.head.begin, q(2).end),
+              NerTag.Date,
+              q.head.begin,
+              q(1).end
+            )
           )
         )
       )
@@ -62,7 +67,9 @@ object DateFind {
     )

   def readMonth: Reader[Int] =
-    Reader.readFirst(w => Some(months.indexWhere(_.contains(w.value))).filter(_ > 0).map(_ + 1))
+    Reader.readFirst(w =>
+      Some(months.indexWhere(_.contains(w.value))).filter(_ > 0).map(_ + 1)
+    )

   def readDay: Reader[Int] =
     Reader.readFirst(w => Try(w.value.toInt).filter(n => n > 0 && n <= 31).toOption)
@@ -90,7 +97,8 @@ object DateFind {
     def readFirst[A](f: Word => Option[A]): Reader[A] =
       Reader({
         case Nil => Result.Failure
-        case a :: as => f(a).map(value => Result.Success(value, as)).getOrElse(Result.Failure)
+        case a :: as =>
+          f(a).map(value => Result.Success(value, as)).getOrElse(Result.Failure)
       })
   }


@@ -57,7 +57,9 @@ object StanfordNerClassifier {
           "/edu/stanford/nlp/models/ner/german.conll.germeval2014.hgc_175m_600.crf.ser.gz"
         )
       case Language.English =>
-        getClass.getResource("/edu/stanford/nlp/models/ner/english.all.3class.distsim.crf.ser.gz")
+        getClass.getResource(
+          "/edu/stanford/nlp/models/ner/english.all.3class.distsim.crf.ser.gz"
+        )
     })
   }
 }


@@ -7,7 +7,8 @@ import docspell.common._
 object TextAnalyserSuite extends SimpleTestSuite {

   test("find english ner labels") {
-    val labels = StanfordNerClassifier.nerAnnotate(Language.English)(TestFiles.letterENText)
+    val labels =
+      StanfordNerClassifier.nerAnnotate(Language.English)(TestFiles.letterENText)
     val expect = Vector(
       NerLabel("Derek", NerTag.Person, 0, 5),
       NerLabel("Jeter", NerTag.Person, 6, 11),
@@ -34,7 +35,8 @@ object TextAnalyserSuite extends SimpleTestSuite {
   }

   test("find german ner labels") {
-    val labels = StanfordNerClassifier.nerAnnotate(Language.German)(TestFiles.letterDEText)
+    val labels =
+      StanfordNerClassifier.nerAnnotate(Language.German)(TestFiles.letterDEText)
     val expect = Vector(
       NerLabel("Max", NerTag.Person, 0, 3),
       NerLabel("Mustermann", NerTag.Person, 4, 14),


@@ -75,6 +75,7 @@ object AuthToken {
     Either.catchNonFatal(s.toLong).toOption

   private def constTimeEq(s1: String, s2: String): Boolean =
-    s1.zip(s2).foldLeft(true)({ case (r, (c1, c2)) => r & c1 == c2 }) & s1.length == s2.length
+    s1.zip(s2)
+      .foldLeft(true)({ case (r, (c1, c2)) => r & c1 == c2 }) & s1.length == s2.length
 }


@@ -58,7 +58,12 @@ object OCollective {
     def updateFailed: PassChangeResult = UpdateFailed
   }

-  case class RegisterData(collName: Ident, login: Ident, password: Password, invite: Option[Ident])
+  case class RegisterData(
+      collName: Ident,
+      login: Ident,
+      password: Password,
+      invite: Option[Ident]
+  )

   sealed trait RegisterResult {
     def toEither: Either[Throwable, Unit]
@@ -117,7 +122,8 @@ object OCollective {
           .traverse(_ => RUser.updatePassword(accountId, PasswordCrypt.crypt(newPass)))
         res = check match {
           case Some(true) =>
-            if (n.getOrElse(0) > 0) PassChangeResult.success else PassChangeResult.updateFailed
+            if (n.getOrElse(0) > 0) PassChangeResult.success
+            else PassChangeResult.updateFailed
           case Some(false) =>
             PassChangeResult.passwordMismatch
           case None =>


@@ -8,7 +8,14 @@ import doobie._
 import doobie.implicits._
 import docspell.store.{AddResult, Store}
 import docspell.store.queries.{QAttachment, QItem}
-import OItem.{AttachmentArchiveData, AttachmentData, AttachmentSourceData, ItemData, ListItem, Query}
+import OItem.{
+  AttachmentArchiveData,
+  AttachmentData,
+  AttachmentSourceData,
+  ItemData,
+  ListItem,
+  Query
+}
 import bitpeace.{FileMeta, RangeDef}
 import docspell.common.{Direction, Ident, ItemState, MetaProposalList, Timestamp}
 import docspell.store.records._
@@ -21,9 +28,15 @@ trait OItem[F[_]] {
   def findAttachment(id: Ident, collective: Ident): F[Option[AttachmentData[F]]]

-  def findAttachmentSource(id: Ident, collective: Ident): F[Option[AttachmentSourceData[F]]]
+  def findAttachmentSource(
+      id: Ident,
+      collective: Ident
+  ): F[Option[AttachmentSourceData[F]]]

-  def findAttachmentArchive(id: Ident, collective: Ident): F[Option[AttachmentArchiveData[F]]]
+  def findAttachmentArchive(
+      id: Ident,
+      collective: Ident
+  ): F[Option[AttachmentArchiveData[F]]]

   def setTags(item: Ident, tagIds: List[Ident], collective: Ident): F[AddResult]
@@ -45,7 +58,11 @@ trait OItem[F[_]] {
   def setItemDate(item: Ident, date: Option[Timestamp], collective: Ident): F[AddResult]

-  def setItemDueDate(item: Ident, date: Option[Timestamp], collective: Ident): F[AddResult]
+  def setItemDueDate(
+      item: Ident,
+      date: Option[Timestamp],
+      collective: Ident
+  ): F[AddResult]

   def getProposals(item: Ident, collective: Ident): F[MetaProposalList]
@@ -104,7 +121,9 @@ object OItem {
     Resource.pure[F, OItem[F]](new OItem[F] {
       def findItem(id: Ident, collective: Ident): F[Option[ItemData]] =
-        store.transact(QItem.findItem(id)).map(opt => opt.flatMap(_.filterCollective(collective)))
+        store
+          .transact(QItem.findItem(id))
+          .map(opt => opt.flatMap(_.filterCollective(collective)))

       def findItems(q: Query, maxResults: Int): F[Vector[ListItem]] =
         store.transact(QItem.findItems(q).take(maxResults.toLong)).compile.toVector
@@ -126,7 +145,10 @@ object OItem {
             (None: Option[AttachmentData[F]]).pure[F]
           })

-      def findAttachmentSource(id: Ident, collective: Ident): F[Option[AttachmentSourceData[F]]] =
+      def findAttachmentSource(
+          id: Ident,
+          collective: Ident
+      ): F[Option[AttachmentSourceData[F]]] =
         store
           .transact(RAttachmentSource.findByIdAndCollective(id, collective))
           .flatMap({
@@ -143,7 +165,10 @@ object OItem {
             (None: Option[AttachmentSourceData[F]]).pure[F]
           })

-      def findAttachmentArchive(id: Ident, collective: Ident): F[Option[AttachmentArchiveData[F]]] =
+      def findAttachmentArchive(
+          id: Ident,
+          collective: Ident
+      ): F[Option[AttachmentArchiveData[F]]] =
         store
           .transact(RAttachmentArchive.findByIdAndCollective(id, collective))
           .flatMap({
@@ -183,38 +208,63 @@ object OItem {
         store.transact(db).attempt.map(AddResult.fromUpdate)
       }

-      def setDirection(item: Ident, direction: Direction, collective: Ident): F[AddResult] =
+      def setDirection(
+          item: Ident,
+          direction: Direction,
+          collective: Ident
+      ): F[AddResult] =
         store
           .transact(RItem.updateDirection(item, collective, direction))
           .attempt
           .map(AddResult.fromUpdate)

       def setCorrOrg(item: Ident, org: Option[Ident], collective: Ident): F[AddResult] =
-        store.transact(RItem.updateCorrOrg(item, collective, org)).attempt.map(AddResult.fromUpdate)
+        store
+          .transact(RItem.updateCorrOrg(item, collective, org))
+          .attempt
+          .map(AddResult.fromUpdate)

-      def setCorrPerson(item: Ident, person: Option[Ident], collective: Ident): F[AddResult] =
+      def setCorrPerson(
+          item: Ident,
+          person: Option[Ident],
+          collective: Ident
+      ): F[AddResult] =
         store
           .transact(RItem.updateCorrPerson(item, collective, person))
           .attempt
           .map(AddResult.fromUpdate)

-      def setConcPerson(item: Ident, person: Option[Ident], collective: Ident): F[AddResult] =
+      def setConcPerson(
+          item: Ident,
+          person: Option[Ident],
+          collective: Ident
+      ): F[AddResult] =
         store
           .transact(RItem.updateConcPerson(item, collective, person))
           .attempt
           .map(AddResult.fromUpdate)

-      def setConcEquip(item: Ident, equip: Option[Ident], collective: Ident): F[AddResult] =
+      def setConcEquip(
+          item: Ident,
+          equip: Option[Ident],
+          collective: Ident
+      ): F[AddResult] =
         store
           .transact(RItem.updateConcEquip(item, collective, equip))
           .attempt
           .map(AddResult.fromUpdate)

       def setNotes(item: Ident, notes: Option[String], collective: Ident): F[AddResult] =
-        store.transact(RItem.updateNotes(item, collective, notes)).attempt.map(AddResult.fromUpdate)
+        store
+          .transact(RItem.updateNotes(item, collective, notes))
+          .attempt
+          .map(AddResult.fromUpdate)

       def setName(item: Ident, name: String, collective: Ident): F[AddResult] =
-        store.transact(RItem.updateName(item, collective, name)).attempt.map(AddResult.fromUpdate)
+        store
+          .transact(RItem.updateName(item, collective, name))
+          .attempt
+          .map(AddResult.fromUpdate)

       def setState(item: Ident, state: ItemState, collective: Ident): F[AddResult] =
         store
@@ -222,10 +272,21 @@ object OItem {
           .attempt
           .map(AddResult.fromUpdate)

-      def setItemDate(item: Ident, date: Option[Timestamp], collective: Ident): F[AddResult] =
-        store.transact(RItem.updateDate(item, collective, date)).attempt.map(AddResult.fromUpdate)
+      def setItemDate(
+          item: Ident,
+          date: Option[Timestamp],
+          collective: Ident
+      ): F[AddResult] =
+        store
+          .transact(RItem.updateDate(item, collective, date))
+          .attempt
+          .map(AddResult.fromUpdate)

-      def setItemDueDate(item: Ident, date: Option[Timestamp], collective: Ident): F[AddResult] =
+      def setItemDueDate(
+          item: Ident,
+          date: Option[Timestamp],
+          collective: Ident
+      ): F[AddResult] =
         store
           .transact(RItem.updateDueDate(item, collective, date))
           .attempt


@@ -52,7 +52,9 @@ object OJob {
       def mustCancel(job: Option[RJob]): Option[(RJob, Ident)] =
         for {
           worker <- job.flatMap(_.worker)
-          job <- job.filter(j => j.state == JobState.Scheduled || j.state == JobState.Running)
+          job <- job.filter(j =>
+            j.state == JobState.Scheduled || j.state == JobState.Running
+          )
         } yield (job, worker)

       def canDelete(j: RJob): Boolean =
@@ -68,8 +70,11 @@ object OJob {
       }

       def tryCancel(job: RJob, worker: Ident): F[JobCancelResult] =
-        joex.cancelJob(job.id, worker)
-          .map(flag => if (flag) JobCancelResult.CancelRequested else JobCancelResult.JobNotFound)
+        joex
+          .cancelJob(job.id, worker)
+          .map(flag =>
+            if (flag) JobCancelResult.CancelRequested else JobCancelResult.JobNotFound
+          )

         for {
           tryDel <- store.transact(tryDelete)


@@ -34,7 +34,10 @@ object OJoex {
       } yield cancel.isDefined
     })

-  def create[F[_]: ConcurrentEffect](ec: ExecutionContext, store: Store[F]): Resource[F, OJoex[F]] =
+  def create[F[_]: ConcurrentEffect](
+      ec: ExecutionContext,
+      store: Store[F]
+  ): Resource[F, OJoex[F]] =
     JoexClient.resource(ec).flatMap(client => apply(client, store))
 }


@@ -149,8 +149,9 @@ object OMail {
         )
       } yield {
         val addAttach = m.attach.filter(ras).map { a =>
-          Attach[F](Stream.emit(a._2).through(store.bitpeace.fetchData2(RangeDef.all)))
-            .withFilename(a._1.name)
+          Attach[F](
+            Stream.emit(a._2).through(store.bitpeace.fetchData2(RangeDef.all))
+          ).withFilename(a._1.name)
             .withLength(a._2.length)
             .withMimeType(_root_.emil.MimeType.parse(a._2.mimetype.asString).toOption)
         }
@@ -187,7 +188,10 @@ object OMail {
       store.transact(save.value).attempt.map {
         case Right(Some(id)) => Right(id)
         case Right(None) =>
-          Left(SendResult.StoreFailure(new Exception(s"Could not find user to save mail.")))
+          Left(
+            SendResult
+              .StoreFailure(new Exception(s"Could not find user to save mail."))
+          )
         case Left(ex) => Left(SendResult.StoreFailure(ex))
       }
     }


@@ -17,7 +17,10 @@ trait OOrganization[F[_]] {
   def updateOrg(s: OrgAndContacts): F[AddResult]

-  def findAllPerson(account: AccountId, query: Option[String]): F[Vector[PersonAndContacts]]
+  def findAllPerson(
+      account: AccountId,
+      query: Option[String]
+  ): F[Vector[PersonAndContacts]]

   def findAllPersonRefs(account: AccountId, nameQuery: Option[String]): F[Vector[IdRef]]
@@ -39,14 +42,20 @@ object OOrganization {
   def apply[F[_]: Effect](store: Store[F]): Resource[F, OOrganization[F]] =
     Resource.pure[F, OOrganization[F]](new OOrganization[F] {

-      def findAllOrg(account: AccountId, query: Option[String]): F[Vector[OrgAndContacts]] =
+      def findAllOrg(
+          account: AccountId,
+          query: Option[String]
+      ): F[Vector[OrgAndContacts]] =
         store
           .transact(QOrganization.findOrgAndContact(account.collective, query, _.name))
           .map({ case (org, cont) => OrgAndContacts(org, cont) })
           .compile
           .toVector

-      def findAllOrgRefs(account: AccountId, nameQuery: Option[String]): F[Vector[IdRef]] =
+      def findAllOrgRefs(
+          account: AccountId,
+          nameQuery: Option[String]
+      ): F[Vector[IdRef]] =
         store.transact(ROrganization.findAllRef(account.collective, nameQuery, _.name))

       def addOrg(s: OrgAndContacts): F[AddResult] =
@@ -55,14 +64,20 @@ object OOrganization {
       def updateOrg(s: OrgAndContacts): F[AddResult] =
         QOrganization.updateOrg(s.org, s.contacts, s.org.cid)(store)

-      def findAllPerson(account: AccountId, query: Option[String]): F[Vector[PersonAndContacts]] =
+      def findAllPerson(
+          account: AccountId,
+          query: Option[String]
+      ): F[Vector[PersonAndContacts]] =
         store
           .transact(QOrganization.findPersonAndContact(account.collective, query, _.name))
           .map({ case (person, cont) => PersonAndContacts(person, cont) })
           .compile
           .toVector

-      def findAllPersonRefs(account: AccountId, nameQuery: Option[String]): F[Vector[IdRef]] =
+      def findAllPersonRefs(
+          account: AccountId,
+          nameQuery: Option[String]
+      ): F[Vector[IdRef]] =
         store.transact(RPerson.findAllRef(account.collective, nameQuery, _.name))

       def addPerson(s: PersonAndContacts): F[AddResult] =
@@ -72,7 +87,10 @@ object OOrganization {
         QOrganization.updatePerson(s.person, s.contacts, s.person.cid)(store)

       def deleteOrg(orgId: Ident, collective: Ident): F[AddResult] =
-        store.transact(QOrganization.deleteOrg(orgId, collective)).attempt.map(AddResult.fromUpdate)
+        store
+          .transact(QOrganization.deleteOrg(orgId, collective))
+          .attempt
+          .map(AddResult.fromUpdate)

       def deletePerson(personId: Ident, collective: Ident): F[AddResult] =
         store


@@ -57,7 +57,10 @@ object OUpload {
   ): Resource[F, OUpload[F]] =
     Resource.pure[F, OUpload[F]](new OUpload[F] {

-      def submit(data: OUpload.UploadData[F], account: AccountId): F[OUpload.UploadResult] =
+      def submit(
+          data: OUpload.UploadData[F],
+          account: AccountId
+      ): F[OUpload.UploadResult] =
         for {
           files <- data.files.traverse(saveFile).map(_.flatten)
           pred <- checkFileList(files)
@@ -74,12 +77,16 @@ object OUpload {
           job <- pred.traverse(_ => makeJobs(args, account, data.priority, data.tracker))
           _ <- logger.fdebug(s"Storing jobs: $job")
           res <- job.traverse(submitJobs)
-          _ <- store.transact(RSource.incrementCounter(data.meta.sourceAbbrev, account.collective))
+          _ <- store.transact(
+            RSource.incrementCounter(data.meta.sourceAbbrev, account.collective)
+          )
         } yield res.fold(identity, identity)

       def submit(data: OUpload.UploadData[F], sourceId: Ident): F[OUpload.UploadResult] =
         for {
-          sOpt <- store.transact(RSource.find(sourceId)).map(_.toRight(UploadResult.NoSource))
+          sOpt <- store
+            .transact(RSource.find(sourceId))
+            .map(_.toRight(UploadResult.NoSource))
           abbrev = sOpt.map(_.abbrev).toOption.getOrElse(data.meta.sourceAbbrev)
           updata = data.copy(meta = data.meta.copy(sourceAbbrev = abbrev))
           accId = sOpt.map(source => AccountId(source.cid, source.sid))
@@ -106,7 +113,9 @@ object OUpload {
             None
           }, id => Some(ProcessItemArgs.File(file.name, id))))

-      private def checkFileList(files: Seq[ProcessItemArgs.File]): F[Either[UploadResult, Unit]] =
+      private def checkFileList(
+          files: Seq[ProcessItemArgs.File]
+      ): F[Either[UploadResult, Unit]] =
         Sync[F].pure(if (files.isEmpty) Left(UploadResult.NoFiles) else Right(()))

       private def makeJobs(


@@ -28,7 +28,10 @@ object OSignup {
         if (cfg.mode == Config.Mode.Invite) {
           if (cfg.newInvitePassword.isEmpty || cfg.newInvitePassword != password)
             NewInviteResult.passwordMismatch.pure[F]
-          else store.transact(RInvitation.insertNew).map(ri => NewInviteResult.success(ri.id))
+          else
+            store
+              .transact(RInvitation.insertNew)
+              .map(ri => NewInviteResult.success(ri.id))
         } else {
           Effect[F].pure(NewInviteResult.invitationClosed)
         }


@@ -21,7 +21,9 @@ object AccountId {
         val user = input.substring(n + 1)
         Ident
           .fromString(coll)
-          .flatMap(collId => Ident.fromString(user).map(userId => AccountId(collId, userId)))
+          .flatMap(collId =>
+            Ident.fromString(user).map(userId => AccountId(collId, userId))
+          )
       case _ =>
         invalid
     }


@@ -18,7 +18,11 @@ object File {
   def mkTempDir[F[_]: Sync](parent: Path, prefix: String): F[Path] =
     mkDir(parent).map(p => Files.createTempDirectory(p, prefix))

-  def mkTempFile[F[_]: Sync](parent: Path, prefix: String, suffix: Option[String] = None): F[Path] =
+  def mkTempFile[F[_]: Sync](
+      parent: Path,
+      prefix: String,
+      suffix: Option[String] = None
+  ): F[Path] =
     mkDir(parent).map(p => Files.createTempFile(p, prefix, suffix.orNull))

   def deleteDirectory[F[_]: Sync](dir: Path): F[Int] = Sync[F].delay {
@@ -26,7 +30,10 @@ object File {
     Files.walkFileTree(
       dir,
       new SimpleFileVisitor[Path]() {
-        override def visitFile(file: Path, attrs: BasicFileAttributes): FileVisitResult = {
+        override def visitFile(
+            file: Path,
+            attrs: BasicFileAttributes
+        ): FileVisitResult = {
           Files.deleteIfExists(file)
           count.incrementAndGet()
           FileVisitResult.CONTINUE
@@ -59,7 +66,8 @@ object File {
   def withTempDir[F[_]: Sync](parent: Path, prefix: String): Resource[F, Path] =
     Resource.make(mkTempDir(parent, prefix))(p => delete(p).map(_ => ()))

-  def listFiles[F[_]: Sync](pred: Path => Boolean, dir: Path): F[List[Path]] = Sync[F].delay {
+  def listFiles[F[_]: Sync](pred: Path => Boolean, dir: Path): F[List[Path]] =
+    Sync[F].delay {
       val javaList =
         Files.list(dir).filter(p => pred(p)).collect(java.util.stream.Collectors.toList())
       javaList.asScala.toList.sortBy(_.getFileName.toString)


@@ -31,7 +31,8 @@ object JobState {
   /** Finished with success */
   case object Success extends JobState {}

-  val all: Set[JobState] = Set(Waiting, Scheduled, Running, Stuck, Failed, Cancelled, Success)
+  val all: Set[JobState] =
+    Set(Waiting, Scheduled, Running, Stuck, Failed, Cancelled, Success)
   val queued: Set[JobState] = Set(Waiting, Scheduled, Stuck)
   val done: Set[JobState] = Set(Failed, Cancelled, Success)


@@ -40,7 +40,9 @@ case class LenientUri(
     withQueryPlain(name, URLEncoder.encode(value, "UTF-8"))

   def withQueryPlain(name: String, value: String): LenientUri =
-    copy(query = query.map(q => q + "&" + name + "=" + value).orElse(Option(s"$name=$value")))
+    copy(query =
+      query.map(q => q + "&" + name + "=" + value).orElse(Option(s"$name=$value"))
+    )

   def withFragment(f: String): LenientUri =
     copy(fragment = Some(f))
@@ -56,7 +58,10 @@ case class LenientUri(
     )
   }

-  def readURL[F[_]: Sync: ContextShift](chunkSize: Int, blocker: Blocker): Stream[F, Byte] =
+  def readURL[F[_]: Sync: ContextShift](
+      chunkSize: Int,
+      blocker: Blocker
+  ): Stream[F, Byte] =
     Stream
       .emit(Either.catchNonFatal(new URL(asString)))
       .covary[F]
@@ -135,7 +140,8 @@ object LenientUri {
       case "/" => RootPath
       case "" => EmptyPath
       case _ =>
-        NonEmptyList.fromList(stripLeading(str, '/').split('/').toList.map(percentDecode)) match {
+        NonEmptyList
+          .fromList(stripLeading(str, '/').split('/').toList.map(percentDecode)) match {
           case Some(nl) => NonEmptyPath(nl)
           case None => sys.error(s"Invalid url: $str")
         }


@@ -66,10 +66,17 @@ object MimeType {
     }

     def parseParams(s: String): Map[String, String] =
-      s.split(';').map(_.trim).filter(_.nonEmpty).toList.flatMap(p => p.split("=", 2).toList match {
+      s.split(';')
+        .map(_.trim)
+        .filter(_.nonEmpty)
+        .toList
+        .flatMap(p =>
+          p.split("=", 2).toList match {
             case a :: b :: Nil => Some((a, b))
             case _ => None
-      }).toMap
+          }
+        )
+        .toMap

     for {
       pt <- parsePrimary


@@ -47,14 +47,17 @@ object SystemCommand {
       for {
         _ <- writeToProcess(stdin, proc, blocker)
         term <- Sync[F].delay(proc.waitFor(cmd.timeout.seconds, TimeUnit.SECONDS))
-        _ <- if (term) logger.debug(s"Command `${cmd.cmdString}` finished: ${proc.exitValue}")
+        _ <- if (term)
+          logger.debug(s"Command `${cmd.cmdString}` finished: ${proc.exitValue}")
         else
           logger.warn(
             s"Command `${cmd.cmdString}` did not finish in ${cmd.timeout.formatExact}!"
           )
         _ <- if (!term) timeoutError(proc, cmd) else Sync[F].pure(())
-        out <- if (term) inputStreamToString(proc.getInputStream, blocker) else Sync[F].pure("")
-        err <- if (term) inputStreamToString(proc.getErrorStream, blocker) else Sync[F].pure("")
+        out <- if (term) inputStreamToString(proc.getInputStream, blocker)
+        else Sync[F].pure("")
+        err <- if (term) inputStreamToString(proc.getErrorStream, blocker)
+        else Sync[F].pure("")
       } yield Result(proc.exitValue, out, err)
     }
   }
@@ -122,12 +125,17 @@ object SystemCommand {
       proc: Process,
       blocker: Blocker
   ): F[Unit] =
-    data.through(io.writeOutputStream(Sync[F].delay(proc.getOutputStream), blocker)).compile.drain
+    data
+      .through(io.writeOutputStream(Sync[F].delay(proc.getOutputStream), blocker))
+      .compile
+      .drain

   private def timeoutError[F[_]: Sync](proc: Process, cmd: Config): F[Unit] =
     Sync[F].delay(proc.destroyForcibly()).attempt *> {
       Sync[F].raiseError(
-        new Exception(s"Command `${cmd.cmdString}` timed out (${cmd.timeout.formatExact})")
+        new Exception(
+          s"Command `${cmd.cmdString}` timed out (${cmd.timeout.formatExact})"
+        )
       )
     }
 }


@@ -24,11 +24,18 @@ object ThreadFactories {
   ): Resource[F, ExecutionContextExecutorService] =
     Resource.make(Sync[F].delay(c))(ec => Sync[F].delay(ec.shutdown))

-  def cached[F[_]: Sync](tf: ThreadFactory): Resource[F, ExecutionContextExecutorService] =
+  def cached[F[_]: Sync](
+      tf: ThreadFactory
+  ): Resource[F, ExecutionContextExecutorService] =
     executorResource(
       ExecutionContext.fromExecutorService(Executors.newCachedThreadPool(tf))
     )

-  def fixed[F[_]: Sync](n: Int, tf: ThreadFactory): Resource[F, ExecutionContextExecutorService] =
-    executorResource(ExecutionContext.fromExecutorService(Executors.newFixedThreadPool(n, tf)))
+  def fixed[F[_]: Sync](
+      n: Int,
+      tf: ThreadFactory
+  ): Resource[F, ExecutionContextExecutorService] =
+    executorResource(
+      ExecutionContext.fromExecutorService(Executors.newFixedThreadPool(n, tf))
+    )
 }


@@ -26,17 +26,25 @@ object Implicits {
   implicit val byteVectorReader: ConfigReader[ByteVector] =
     ConfigReader[String].emap(reason { str =>
-      if (str.startsWith("hex:")) ByteVector.fromHex(str.drop(4)).toRight("Invalid hex value.")
+      if (str.startsWith("hex:"))
+        ByteVector.fromHex(str.drop(4)).toRight("Invalid hex value.")
       else if (str.startsWith("b64:"))
         ByteVector.fromBase64(str.drop(4)).toRight("Invalid Base64 string.")
-      else ByteVector.encodeUtf8(str).left.map(ex => s"Invalid utf8 string: ${ex.getMessage}")
+      else
+        ByteVector
+          .encodeUtf8(str)
+          .left
+          .map(ex => s"Invalid utf8 string: ${ex.getMessage}")
     })

   implicit val caleventReader: ConfigReader[CalEvent] =
     ConfigReader[String].emap(reason(CalEvent.parse))

-  def reason[A: ClassTag](f: String => Either[String, A]): String => Either[FailureReason, A] =
+  def reason[A: ClassTag](
+      f: String => Either[String, A]
+  ): String => Either[FailureReason, A] =
     in =>
-      f(in).left.map(str => CannotConvert(in, implicitly[ClassTag[A]].runtimeClass.toString, str))
+      f(in).left.map(str =>
+        CannotConvert(in, implicitly[ClassTag[A]].runtimeClass.toString, str)
+      )
 }


@@ -33,13 +33,21 @@ object NerLabelSpanTest extends SimpleTestSuite {
     )

     val spans = NerLabelSpan.build(labels)
-    assertEquals(spans, Vector(
+    assertEquals(
+      spans,
+      Vector(
         NerLabel("Derek Jeter", NerTag.Person, 0, 11),
         NerLabel("Derek Jeter", NerTag.Person, 68, 79),
-        NerLabel("Syrup Production Old Sticky Pancake Company", NerTag.Organization, 162, 205),
+        NerLabel(
+          "Syrup Production Old Sticky Pancake Company",
+          NerTag.Organization,
+          162,
+          205
+        ),
         NerLabel("Maple Lane", NerTag.Location, 210, 220),
         NerLabel("Little League", NerTag.Organization, 351, 364),
         NerLabel("Derek Jeter", NerTag.Person, 1121, 1132)
-    ))
+      )
+    )
   }
 }


@@ -43,11 +43,13 @@ object ConversionResult {
   case class SuccessPdf[F[_]](pdf: Stream[F, Byte]) extends ConversionResult[F] {
     val pdfData = pdf
   }

-  case class SuccessPdfTxt[F[_]](pdf: Stream[F, Byte], txt: F[String]) extends ConversionResult[F] {
+  case class SuccessPdfTxt[F[_]](pdf: Stream[F, Byte], txt: F[String])
+      extends ConversionResult[F] {
     val pdfData = pdf
   }

-  case class InputMalformed[F[_]](mimeType: MimeType, reason: String) extends ConversionResult[F] {
+  case class InputMalformed[F[_]](mimeType: MimeType, reason: String)
+      extends ConversionResult[F] {
     val pdfData = Stream.empty
   }
 }


@@ -40,9 +40,18 @@ private[extern] object ExternConv {
       in.through(createInput).flatMap { _ =>
         SystemCommand
-          .execSuccess[F](sysCfg, blocker, logger, Some(dir), if (useStdin) in else Stream.empty)
+          .execSuccess[F](
+            sysCfg,
+            blocker,
+            logger,
+            Some(dir),
+            if (useStdin) in
+            else Stream.empty
+          )
           .evalMap(result =>
-            logResult(name, result, logger).flatMap(_ => reader(out, result)).flatMap(handler.run)
+            logResult(name, result, logger)
+              .flatMap(_ => reader(out, result))
+              .flatMap(handler.run)
           )
       }
     }
@@ -106,7 +115,9 @@ private[extern] object ExternConv {
       inFile: Path
   ): Pipe[F, Byte, Unit] =
     in =>
-      Stream.eval(logger.debug(s"Storing input to file ${inFile} for running $name")).drain ++
+      Stream
+        .eval(logger.debug(s"Storing input to file ${inFile} for running $name"))
+        .drain ++
         Stream.eval(storeFile(in, inFile, blocker))

   private def logResult[F[_]: Sync](


@@ -19,7 +19,15 @@ object Unoconv {
     val reader: (Path, SystemCommand.Result) => F[ConversionResult[F]] =
       ExternConv.readResult[F](blocker, chunkSize, logger)

-    ExternConv.toPDF[F, A]("unoconv", cfg.command, cfg.workingDir, false, blocker, logger, reader)(
+    ExternConv.toPDF[F, A](
+      "unoconv",
+      cfg.command,
+      cfg.workingDir,
+      false,
+      blocker,
+      logger,
+      reader
+    )(
       in,
       handler
     )


@@ -18,7 +18,11 @@ import docspell.common._
 object Markdown {

-  def toHtml(is: InputStream, cfg: MarkdownConfig, cs: Charset): Either[Throwable, String] = {
+  def toHtml(
+      is: InputStream,
+      cfg: MarkdownConfig,
+      cs: Charset
+  ): Either[Throwable, String] = {
     val p = createParser()
     val r = createRenderer()
     Try {
@@ -35,7 +39,11 @@ object Markdown {
     wrapHtml(r.render(doc), cfg)
   }

-  def toHtml[F[_]: Sync](data: Stream[F, Byte], cfg: MarkdownConfig, cs: Charset): F[String] =
+  def toHtml[F[_]: Sync](
+      data: Stream[F, Byte],
+      cfg: MarkdownConfig,
+      cs: Charset
+  ): F[String] =
     data.through(Binary.decode(cs)).compile.foldMonoid.map(str => toHtml(str, cfg))

   private def wrapHtml(body: String, cfg: MarkdownConfig): String =


@@ -13,7 +13,11 @@ import docspell.files.ImageSize
 trait Extraction[F[_]] {

-  def extractText(data: Stream[F, Byte], dataType: DataType, lang: Language): F[ExtractResult]
+  def extractText(
+      data: Stream[F, Byte],
+      dataType: DataType,
+      lang: Language
+  ): F[ExtractResult]

 }
@@ -71,13 +75,17 @@ object Extraction {
               doExtract
           }
         case None =>
-          logger.info(s"Cannot read image data from ${mt.asString}. Extracting anyways.") *>
+          logger.info(
+            s"Cannot read image data from ${mt.asString}. Extracting anyways."
+          ) *>
             doExtract
       }

     case OdfType.ContainerMatch(_) =>
       logger
-        .info(s"File detected as ${OdfType.container}. Try to read as OpenDocument file.") *>
+        .info(
+          s"File detected as ${OdfType.container}. Try to read as OpenDocument file."
+        ) *>
         OdfExtract.get(data).map(ExtractResult.fromEither)

     case mt @ MimeType("text", sub, _) if !sub.contains("html") =>


@@ -135,7 +135,9 @@ object Ocr {
       .map(_ => targetFile)
       .handleErrorWith { th =>
         logger
-          .warn(s"Unpaper command failed: ${th.getMessage}. Using input file for text extraction.")
+          .warn(
+            s"Unpaper command failed: ${th.getMessage}. Using input file for text extraction."
+          )
         Stream.emit(img)
       }
   }
@@ -152,10 +154,15 @@ object Ocr {
   ): Stream[F, String] =
     // tesseract cannot cope with absolute filenames
    // so use the parent as working dir
-    runUnpaperFile(img, config.unpaper.command, img.getParent, blocker, logger).flatMap { uimg =>
-      val cmd = config.tesseract.command
-        .replace(Map("{{file}}" -> uimg.getFileName.toString, "{{lang}}" -> fixLanguage(lang)))
-      SystemCommand.execSuccess[F](cmd, blocker, logger, wd = Some(uimg.getParent)).map(_.stdout)
+    runUnpaperFile(img, config.unpaper.command, img.getParent, blocker, logger).flatMap {
+      uimg =>
+        val cmd = config.tesseract.command
+          .replace(
+            Map("{{file}}" -> uimg.getFileName.toString, "{{lang}}" -> fixLanguage(lang))
+          )
+        SystemCommand
+          .execSuccess[F](cmd, blocker, logger, wd = Some(uimg.getParent))
+          .map(_.stdout)
     }

   /** Run tesseract on the given image file and return the extracted


@@ -41,11 +41,16 @@ object OcrConfig {
       Paths.get(System.getProperty("java.io.tmpdir")).resolve("docspell-extraction")
     ),
     unpaper = Unpaper(
-      SystemCommand.Config("unpaper", Seq("{{infile}}", "{{outfile}}"), Duration.seconds(30))
+      SystemCommand
+        .Config("unpaper", Seq("{{infile}}", "{{outfile}}"), Duration.seconds(30))
    ),
    tesseract = Tesseract(
      SystemCommand
-        .Config("tesseract", Seq("{{file}}", "stdout", "-l", "{{lang}}"), Duration.minutes(1))
+        .Config(
+          "tesseract",
+          Seq("{{file}}", "stdout", "-l", "{{lang}}"),
+          Duration.minutes(1)
+        )
    )
  )
 }


@@ -14,7 +14,9 @@ import fs2.Stream
 object PdfboxExtract {

   def get[F[_]: Sync](data: Stream[F, Byte]): F[Either[Throwable, String]] =
-    data.compile.to(Array).map(bytes => Using(PDDocument.load(bytes))(readText).toEither.flatten)
+    data.compile
+      .to(Array)
+      .map(bytes => Using(PDDocument.load(bytes))(readText).toEither.flatten)

   def get(is: InputStream): Either[Throwable, String] =
     Using(PDDocument.load(is))(readText).toEither.flatten


@@ -20,10 +20,16 @@ import docspell.files.TikaMimetype
 object PoiExtract {

-  def get[F[_]: Sync](data: Stream[F, Byte], hint: MimeTypeHint): F[Either[Throwable, String]] =
+  def get[F[_]: Sync](
+      data: Stream[F, Byte],
+      hint: MimeTypeHint
+  ): F[Either[Throwable, String]] =
     TikaMimetype.detect(data, hint).flatMap(mt => get(data, mt))

-  def get[F[_]: Sync](data: Stream[F, Byte], mime: MimeType): F[Either[Throwable, String]] =
+  def get[F[_]: Sync](
+      data: Stream[F, Byte],
+      mime: MimeType
+  ): F[Either[Throwable, String]] =
     mime match {
       case PoiType.doc =>
         getDoc(data)


@@ -6,7 +6,8 @@ object PoiType {
   val msoffice = MimeType.application("x-tika-msoffice")
   val ooxml = MimeType.application("x-tika-ooxml")
-  val docx = MimeType.application("vnd.openxmlformats-officedocument.wordprocessingml.document")
+  val docx =
+    MimeType.application("vnd.openxmlformats-officedocument.wordprocessingml.document")
   val xlsx = MimeType.application("vnd.openxmlformats-officedocument.spreadsheetml.sheet")
   val xls = MimeType.application("vnd.ms-excel")
   val doc = MimeType.application("msword")


@@ -15,7 +15,10 @@ object Playing extends IOApp {
     val x = for {
       odsm1 <- TikaMimetype
-        .detect(rtf, MimeTypeHint.filename(ExampleFiles.examples_sample_rtf.path.segments.last))
+        .detect(
+          rtf,
+          MimeTypeHint.filename(ExampleFiles.examples_sample_rtf.path.segments.last)
+        )
       odsm2 <- TikaMimetype.detect(rtf, MimeTypeHint.none)
     } yield (odsm1, odsm2)
     println(x.unsafeRunSync())


@@ -14,9 +14,10 @@ object ZipTest extends SimpleTestSuite {
     val zipFile = ExampleFiles.letters_zip.readURL[IO](8192, blocker)
     val uncomp = zipFile.through(Zip.unzip(8192, blocker))

-    uncomp.evalMap(entry => {
-      val x = entry.data.map(_ => 1).foldMonoid.compile.lastOrError
-      x.map(size => {
+    uncomp
+      .evalMap { entry =>
+        val x = entry.data.map(_ => 1).foldMonoid.compile.lastOrError
+        x.map { size =>
         if (entry.name.endsWith(".pdf")) {
           assertEquals(entry.name, "letter-de.pdf")
           assertEquals(size, 34815)
@@ -24,7 +25,10 @@ object ZipTest extends SimpleTestSuite {
           assertEquals(entry.name, "letter-en.txt")
           assertEquals(size, 1131)
         }
-      })
-    }).compile.drain.unsafeRunSync
+        }
+      }
+      .compile
+      .drain
+      .unsafeRunSync
   }
 }


@@ -12,8 +12,10 @@ import org.log4s._
 object Main extends IOApp {
   private[this] val logger = getLogger

-  val blockingEC = ThreadFactories.cached[IO](ThreadFactories.ofName("docspell-joex-blocking"))
-  val connectEC = ThreadFactories.fixed[IO](5, ThreadFactories.ofName("docspell-joex-dbconnect"))
+  val blockingEC =
+    ThreadFactories.cached[IO](ThreadFactories.ofName("docspell-joex-blocking"))
+  val connectEC =
+    ThreadFactories.fixed[IO](5, ThreadFactories.ofName("docspell-joex-dbconnect"))

   def run(args: List[String]) = {
     args match {
@@ -52,9 +54,17 @@ object Main extends IOApp {
       blocker = Blocker.liftExecutorService(bec)
     } yield Pools(cec, bec, blocker)
     pools.use(p =>
-      JoexServer.stream[IO](cfg, p.connectEC, p.clientEC, p.blocker).compile.drain.as(ExitCode.Success)
+      JoexServer
+        .stream[IO](cfg, p.connectEC, p.clientEC, p.blocker)
+        .compile
+        .drain
+        .as(ExitCode.Success)
     )
   }

-  case class Pools(connectEC: ExecutionContext, clientEC: ExecutionContext, blocker: Blocker)
+  case class Pools(
+      connectEC: ExecutionContext,
+      clientEC: ExecutionContext,
+      blocker: Blocker
+  )
 }


@@ -16,7 +16,8 @@ object HouseKeepingTask {
   val taskName: Ident = Ident.unsafe("housekeeping")

   def apply[F[_]: Sync](cfg: Config): Task[F, Unit, Unit] =
-    Task.log[F](_.info(s"Running house-keeping task now"))
+    Task
+      .log[F](_.info(s"Running house-keeping task now"))
       .flatMap(_ => CleanupInvitesTask(cfg.houseKeeping.cleanupInvites))
       .flatMap(_ => CleanupJobsTask(cfg.houseKeeping.cleanupJobs))


@@ -25,7 +25,10 @@ object CreateItem {
     Task { ctx =>
       def isValidFile(fm: FileMeta) =
         ctx.args.meta.validFileTypes.isEmpty ||
-          ctx.args.meta.validFileTypes.map(_.asString).toSet.contains(fm.mimetype.baseType)
+          ctx.args.meta.validFileTypes
+            .map(_.asString)
+            .toSet
+            .contains(fm.mimetype.baseType)

       def fileMetas(itemId: Ident, now: Timestamp) =
         Stream
@@ -37,7 +40,9 @@ object CreateItem {
             case (f, index) =>
               Ident
                 .randomId[F]
-                .map(id => RAttachment(id, itemId, f.fileMetaId, index.toInt, now, f.name))
+                .map(id =>
+                  RAttachment(id, itemId, f.fileMetaId, index.toInt, now, f.name)
+                )
           })
           .compile
           .toVector
@@ -51,7 +56,9 @@ object CreateItem {
       )

       for {
-        _ <- ctx.logger.info(s"Creating new item with ${ctx.args.files.size} attachment(s)")
+        _ <- ctx.logger.info(
+          s"Creating new item with ${ctx.args.files.size} attachment(s)"
+        )
         time <- Duration.stopTime[F]
         it <- item
         n <- ctx.store.transact(RItem.insert(it))
@@ -61,7 +68,13 @@ object CreateItem {
         _ <- logDifferences(ctx, fm, k.sum)
         dur <- time
         _ <- ctx.logger.info(s"Creating item finished in ${dur.formatExact}")
-      } yield ItemData(it, fm, Vector.empty, Vector.empty, fm.map(a => a.id -> a.fileId).toMap)
+      } yield ItemData(
+        it,
+        fm,
+        Vector.empty,
+        Vector.empty,
+        fm.map(a => a.id -> a.fileId).toMap
+      )
     }

   def insertAttachment[F[_]: Sync](ctx: Context[F, _])(ra: RAttachment): F[Int] = {
@@ -79,7 +92,8 @@ object CreateItem {
       _ <- if (cand.nonEmpty) ctx.logger.warn("Found existing item with these files.")
      else ().pure[F]
      ht <- cand.drop(1).traverse(ri => QItem.delete(ctx.store)(ri.id, ri.cid))
-      _ <- if (ht.sum > 0) ctx.logger.warn(s"Removed ${ht.sum} items with same attachments")
+      _ <- if (ht.sum > 0)
+        ctx.logger.warn(s"Removed ${ht.sum} items with same attachments")
      else ().pure[F]
      rms <- OptionT(
        cand.headOption.traverse(ri =>
@@ -92,7 +106,9 @@ object CreateItem {
       origMap = orig
         .map(originFileTuple)
         .toMap
-    } yield cand.headOption.map(ri => ItemData(ri, rms, Vector.empty, Vector.empty, origMap))
+    } yield cand.headOption.map(ri =>
+      ItemData(ri, rms, Vector.empty, Vector.empty, origMap)
+    )
   }

   private def logDifferences[F[_]: Sync](
@@ -114,6 +130,8 @@ object CreateItem {
   }

   //TODO if no source is present, it must be saved!
-  private def originFileTuple(t: (RAttachment, Option[RAttachmentSource])): (Ident, Ident) =
+  private def originFileTuple(
+      t: (RAttachment, Option[RAttachmentSource])
+  ): (Ident, Ident) =
     t._2.map(s => s.id -> s.fileId).getOrElse(t._1.id -> t._1.fileId)
 }


@@ -24,7 +24,10 @@ case class ItemData(
     copy(metas = next)
   }

-  def changeMeta(attachId: Ident, f: RAttachmentMeta => RAttachmentMeta): RAttachmentMeta =
+  def changeMeta(
+      attachId: Ident,
+      f: RAttachmentMeta => RAttachmentMeta
+  ): RAttachmentMeta =
     f(findOrCreate(attachId))

   def findOrCreate(attachId: Ident): RAttachmentMeta =


@@ -10,15 +10,21 @@ import docspell.store.records.{RItem, RJob}
 object ItemHandler {
   def onCancel[F[_]: Sync: ContextShift]: Task[F, ProcessItemArgs, Unit] =
-    logWarn("Now cancelling. Deleting potentially created data.").flatMap(_ => deleteByFileIds)
+    logWarn("Now cancelling. Deleting potentially created data.").flatMap(_ =>
+      deleteByFileIds
+    )

-  def apply[F[_]: ConcurrentEffect: ContextShift](cfg: Config): Task[F, ProcessItemArgs, Unit] =
+  def apply[F[_]: ConcurrentEffect: ContextShift](
+      cfg: Config
+  ): Task[F, ProcessItemArgs, Unit] =
     CreateItem[F]
       .flatMap(itemStateTask(ItemState.Processing))
       .flatMap(safeProcess[F](cfg))
       .map(_ => ())

-  def itemStateTask[F[_]: Sync, A](state: ItemState)(data: ItemData): Task[F, A, ItemData] =
+  def itemStateTask[F[_]: Sync, A](
+      state: ItemState
+  )(data: ItemData): Task[F, A, ItemData] =
     Task(ctx => ctx.store.transact(RItem.updateState(data.item.id, state)).map(_ => data))

   def isLastRetry[F[_]: Sync, A](ctx: Context[F, A]): F[Boolean] =
@@ -36,8 +42,9 @@ object ItemHandler {
         case Right(d) =>
           Task.pure(d)
         case Left(ex) =>
-          logWarn[F]("Processing failed on last retry. Creating item but without proposals.")
-            .flatMap(_ => itemStateTask(ItemState.Created)(data))
+          logWarn[F](
+            "Processing failed on last retry. Creating item but without proposals."
+          ).flatMap(_ => itemStateTask(ItemState.Created)(data))
             .andThen(_ => Sync[F].raiseError(ex))
       })
     case false =>


@@ -48,10 +48,13 @@ object TextAnalysis {
       rm.content.map(Contact.annotate).getOrElse(Vector.empty)
     }

-  def dateNer[F[_]: Sync](rm: RAttachmentMeta, lang: Language): F[AttachmentDates] = Sync[F].delay {
+  def dateNer[F[_]: Sync](rm: RAttachmentMeta, lang: Language): F[AttachmentDates] =
+    Sync[F].delay {
       AttachmentDates(
         rm,
-        rm.content.map(txt => DateFind.findDates(txt, lang).toVector).getOrElse(Vector.empty)
+        rm.content
+          .map(txt => DateFind.findDates(txt, lang).toVector)
+          .getOrElse(Vector.empty)
       )
     }


@@ -19,7 +19,9 @@ object TextExtraction {
       for {
         _ <- ctx.logger.info("Starting text extraction")
         start <- Duration.stopTime[F]
-        txt <- item.attachments.traverse(extractTextIfEmpty(ctx, cfg, ctx.args.meta.language, item))
+        txt <- item.attachments.traverse(
+          extractTextIfEmpty(ctx, cfg, ctx.args.meta.language, item)
+        )
         _ <- ctx.logger.debug("Storing extracted texts")
         _ <- txt.toList.traverse(rm => ctx.store.transact(RAttachmentMeta.upsert(rm)))
         dur <- start
@@ -53,7 +55,10 @@ object TextExtraction {
       _ <- ctx.logger.debug(s"Extracting text for attachment ${stripAttachmentName(ra)}")
       dst <- Duration.stopTime[F]
       txt <- extractTextFallback(ctx, cfg, ra, lang)(filesToExtract(item, ra))
-      meta = item.changeMeta(ra.id, rm => rm.setContentIfEmpty(txt.map(_.trim).filter(_.nonEmpty)))
+      meta = item.changeMeta(
+        ra.id,
+        rm => rm.setContentIfEmpty(txt.map(_.trim).filter(_.nonEmpty))
+      )
       est <- dst
       _ <- ctx.logger.debug(
         s"Extracting text for attachment ${stripAttachmentName(ra)} finished in ${est.formatExact}"
@@ -76,7 +81,9 @@ object TextExtraction {
       .getOrElse(Mimetype.`application/octet-stream`)

     findMime
-      .flatMap(mt => extr.extractText(data, DataType(MimeType(mt.primary, mt.sub, mt.params)), lang))
+      .flatMap(mt =>
+        extr.extractText(data, DataType(MimeType(mt.primary, mt.sub, mt.params)), lang)
+      )
   }

   private def extractTextFallback[F[_]: Sync: ContextShift](


@@ -49,7 +49,9 @@ object JoexRoutes {
       case POST -> Root / "job" / Ident(id) / "cancel" =>
         for {
           flag <- app.scheduler.requestCancel(id)
-          resp <- Ok(BasicResult(flag, if (flag) "Cancel request submitted" else "Job not found"))
+          resp <- Ok(
+            BasicResult(flag, if (flag) "Cancel request submitted" else "Job not found")
+          )
         } yield resp
     }
   }


@@ -16,11 +16,19 @@ import io.circe.Decoder
   * convenience constructor that uses circe to decode json into some
   * type A.
   */
-case class JobTask[F[_]](name: Ident, task: Task[F, String, Unit], onCancel: Task[F, String, Unit])
+case class JobTask[F[_]](
+    name: Ident,
+    task: Task[F, String, Unit],
+    onCancel: Task[F, String, Unit]
+)

 object JobTask {

-  def json[F[_]: Sync, A](name: Ident, task: Task[F, A, Unit], onCancel: Task[F, A, Unit])(
+  def json[F[_]: Sync, A](
+      name: Ident,
+      task: Task[F, A, Unit],
+      onCancel: Task[F, A, Unit]
+  )(
       implicit D: Decoder[A]
   ): JobTask[F] = {
     val convert: String => F[A] =


@@ -20,7 +20,12 @@ case class LogEvent(
 object LogEvent {

-  def create[F[_]: Sync](jobId: Ident, jobInfo: String, level: LogLevel, msg: String): F[LogEvent] =
+  def create[F[_]: Sync](
+      jobId: Ident,
+      jobInfo: String,
+      level: LogLevel,
+      msg: String
+  ): F[LogEvent] =
     Timestamp.current[F].map(now => LogEvent(jobId, jobInfo, now, level, msg))

 }


@@ -42,7 +42,16 @@ object PeriodicScheduler {
     for {
       waiter <- Resource.liftF(SignallingRef(true))
       state <- Resource.liftF(SignallingRef(PeriodicSchedulerImpl.emptyState[F]))
-      psch = new PeriodicSchedulerImpl[F](cfg, sch, queue, store, client, waiter, state, timer)
+      psch = new PeriodicSchedulerImpl[F](
+        cfg,
+        sch,
+        queue,
+        store,
+        client,
+        waiter,
+        state,
+        timer
+      )
       _ <- Resource.liftF(psch.init)
     } yield psch


@@ -7,7 +7,11 @@ import fs2.concurrent.Queue
 object QueueLogger {

-  def create[F[_]: Sync](jobId: Ident, jobInfo: String, q: Queue[F, LogEvent]): Logger[F] =
+  def create[F[_]: Sync](
+      jobId: Ident,
+      jobInfo: String,
+      q: Queue[F, LogEvent]
+  ): Logger[F] =
     new Logger[F] {
       def trace(msg: => String): F[Unit] =
         LogEvent.create[F](jobId, jobInfo, LogLevel.Debug, msg).flatMap(q.enqueue1)


@@ -38,7 +38,9 @@ case class SchedulerBuilder[F[_]: ConcurrentEffect: ContextShift](
     copy(queue = Resource.pure[F, JobQueue[F]](queue))

   def serve: Resource[F, Scheduler[F]] =
-    resource.evalMap(sch => ConcurrentEffect[F].start(sch.start.compile.drain).map(_ => sch))
+    resource.evalMap(sch =>
+      ConcurrentEffect[F].start(sch.start.compile.drain).map(_ => sch)
+    )

   def resource: Resource[F, Scheduler[F]] = {
     val scheduler = for {
@@ -46,7 +48,17 @@ case class SchedulerBuilder[F[_]: ConcurrentEffect: ContextShift](
       waiter <- Resource.liftF(SignallingRef(true))
       state <- Resource.liftF(SignallingRef(SchedulerImpl.emptyState[F]))
       perms <- Resource.liftF(Semaphore(config.poolSize.toLong))
-    } yield new SchedulerImpl[F](config, blocker, jq, tasks, store, logSink, state, waiter, perms)
+    } yield new SchedulerImpl[F](
+      config,
+      blocker,
+      jq,
+      tasks,
+      store,
+      logSink,
+      state,
+      waiter,
+      perms
+    )

     scheduler.evalTap(_.init).map(s => s: Scheduler[F])
   }


@ -50,7 +50,8 @@ final class SchedulerImpl[F[_]: ConcurrentEffect: ContextShift](
def requestCancel(jobId: Ident): F[Boolean] = def requestCancel(jobId: Ident): F[Boolean] =
state.get.flatMap(_.cancelRequest(jobId) match { state.get.flatMap(_.cancelRequest(jobId) match {
case Some(ct) => ct.map(_ => true) case Some(ct) => ct.map(_ => true)
case None => logger.fwarn(s"Job ${jobId.id} not found, cannot cancel.").map(_ => false) case None =>
logger.fwarn(s"Job ${jobId.id} not found, cannot cancel.").map(_ => false)
}) })
def notifyChange: F[Unit] = def notifyChange: F[Unit] =
@ -67,12 +68,15 @@ final class SchedulerImpl[F[_]: ConcurrentEffect: ContextShift](
.eval(runShutdown) .eval(runShutdown)
.evalMap(_ => logger.finfo("Scheduler is shutting down now.")) .evalMap(_ => logger.finfo("Scheduler is shutting down now."))
.flatMap(_ => .flatMap(_ =>
Stream.eval(state.get) ++ Stream.suspend(state.discrete.takeWhile(_.getRunning.nonEmpty)) Stream.eval(state.get) ++ Stream
.suspend(state.discrete.takeWhile(_.getRunning.nonEmpty))
) )
.flatMap { state => .flatMap { state =>
if (state.getRunning.isEmpty) Stream.eval(logger.finfo("No jobs running.")) if (state.getRunning.isEmpty) Stream.eval(logger.finfo("No jobs running."))
else else
Stream.eval(logger.finfo(s"Waiting for ${state.getRunning.size} jobs to finish.")) ++ Stream.eval(
logger.finfo(s"Waiting for ${state.getRunning.size} jobs to finish.")
) ++
Stream.emit(state) Stream.emit(state)
} }
@ -86,11 +90,14 @@ final class SchedulerImpl[F[_]: ConcurrentEffect: ContextShift](
def mainLoop: Stream[F, Nothing] = { def mainLoop: Stream[F, Nothing] = {
val body: F[Boolean] = val body: F[Boolean] =
for { for {
_ <- permits.available.flatMap(a => logger.fdebug(s"Try to acquire permit ($a free)")) _ <- permits.available.flatMap(a =>
logger.fdebug(s"Try to acquire permit ($a free)")
)
_ <- permits.acquire _ <- permits.acquire
_ <- logger.fdebug("New permit acquired") _ <- logger.fdebug("New permit acquired")
down <- state.get.map(_.shutdownRequest) down <- state.get.map(_.shutdownRequest)
rjob <- if (down) logger.finfo("") *> permits.release *> (None: Option[RJob]).pure[F] rjob <- if (down)
logger.finfo("") *> permits.release *> (None: Option[RJob]).pure[F]
else else
queue.nextJob( queue.nextJob(
group => state.modify(_.nextPrio(group, config.countingScheme)), group => state.modify(_.nextPrio(group, config.countingScheme)),
@ -151,7 +158,11 @@ final class SchedulerImpl[F[_]: ConcurrentEffect: ContextShift](
} yield () } yield ()
def onStart(job: RJob): F[Unit] = def onStart(job: RJob): F[Unit] =
QJob.setRunning(job.id, config.name, store) //also increments retries if current state=stuck QJob.setRunning(
job.id,
config.name,
store
) //also increments retries if current state=stuck
def wrapTask( def wrapTask(
job: RJob, job: RJob,
@ -159,7 +170,9 @@ final class SchedulerImpl[F[_]: ConcurrentEffect: ContextShift](
ctx: Context[F, String] ctx: Context[F, String]
): Task[F, String, Unit] = ): Task[F, String, Unit] =
task task
.mapF(fa => onStart(job) *> logger.fdebug("Starting task now") *> blocker.blockOn(fa)) .mapF(fa =>
onStart(job) *> logger.fdebug("Starting task now") *> blocker.blockOn(fa)
)
.mapF(_.attempt.flatMap({ .mapF(_.attempt.flatMap({
case Right(()) => case Right(()) =>
logger.info(s"Job execution successful: ${job.info}") logger.info(s"Job execution successful: ${job.info}")
@ -196,7 +209,12 @@ final class SchedulerImpl[F[_]: ConcurrentEffect: ContextShift](
onFinish(job, JobState.Stuck) onFinish(job, JobState.Stuck)
}) })
def forkRun(job: RJob, code: F[Unit], onCancel: F[Unit], ctx: Context[F, String]): F[F[Unit]] = { def forkRun(
job: RJob,
code: F[Unit],
onCancel: F[Unit],
ctx: Context[F, String]
): F[F[Unit]] = {
val bfa = blocker.blockOn(code) val bfa = blocker.blockOn(code)
logger.fdebug(s"Forking job ${job.info}") *> logger.fdebug(s"Forking job ${job.info}") *>
ConcurrentEffect[F] ConcurrentEffect[F]
@ -236,10 +254,16 @@ object SchedulerImpl {
} }
def addRunning(job: RJob, token: CancelToken[F]): (State[F], Unit) = def addRunning(job: RJob, token: CancelToken[F]): (State[F], Unit) =
(State(counters, cancelled, cancelTokens.updated(job.id, token), shutdownRequest), ()) (
State(counters, cancelled, cancelTokens.updated(job.id, token), shutdownRequest),
()
)
def removeRunning(job: RJob): (State[F], Unit) = def removeRunning(job: RJob): (State[F], Unit) =
(copy(cancelled = cancelled - job.id, cancelTokens = cancelTokens.removed(job.id)), ()) (
copy(cancelled = cancelled - job.id, cancelTokens = cancelTokens.removed(job.id)),
()
)
def markCancelled(job: RJob): (State[F], Unit) = def markCancelled(job: RJob): (State[F], Unit) =
(copy(cancelled = cancelled + job.id), ()) (copy(cancelled = cancelled + job.id), ())


@ -25,11 +25,13 @@ trait Task[F[_], A, B] {
def mapF[C](f: F[B] => F[C]): Task[F, A, C] = def mapF[C](f: F[B] => F[C]): Task[F, A, C] =
Task(Task.toKleisli(this).mapF(f)) Task(Task.toKleisli(this).mapF(f))
def attempt(implicit F: ApplicativeError[F, Throwable]): Task[F, A, Either[Throwable, B]] = def attempt(
implicit F: ApplicativeError[F, Throwable]
): Task[F, A, Either[Throwable, B]] =
mapF(_.attempt) mapF(_.attempt)
def contramap[C](f: C => F[A])(implicit F: FlatMap[F]): Task[F, C, B] = { ctxc: Context[F, C] => def contramap[C](f: C => F[A])(implicit F: FlatMap[F]): Task[F, C, B] = {
f(ctxc.args).flatMap(a => run(ctxc.map(_ => a))) ctxc: Context[F, C] => f(ctxc.args).flatMap(a => run(ctxc.map(_ => a)))
} }
} }


@ -18,9 +18,15 @@ case class Config(
object Config { object Config {
val postgres = val postgres =
JdbcConfig(LenientUri.unsafe("jdbc:postgresql://localhost:5432/docspelldev"), "dev", "dev") JdbcConfig(
LenientUri.unsafe("jdbc:postgresql://localhost:5432/docspelldev"),
"dev",
"dev"
)
val h2 = JdbcConfig( val h2 = JdbcConfig(
LenientUri.unsafe("jdbc:h2:./target/docspelldev.db;MODE=PostgreSQL;DATABASE_TO_LOWER=TRUE"), LenientUri.unsafe(
"jdbc:h2:./target/docspelldev.db;MODE=PostgreSQL;DATABASE_TO_LOWER=TRUE"
),
"sa", "sa",
"" ""
) )


@ -12,8 +12,10 @@ import org.log4s._
object Main extends IOApp { object Main extends IOApp {
private[this] val logger = getLogger private[this] val logger = getLogger
val blockingEC = ThreadFactories.cached[IO](ThreadFactories.ofName("docspell-restserver-blocking")) val blockingEC =
val connectEC = ThreadFactories.fixed[IO](5, ThreadFactories.ofName("docspell-dbconnect")) ThreadFactories.cached[IO](ThreadFactories.ofName("docspell-restserver-blocking"))
val connectEC =
ThreadFactories.fixed[IO](5, ThreadFactories.ofName("docspell-dbconnect"))
def run(args: List[String]) = { def run(args: List[String]) = {
args match { args match {
@ -53,9 +55,17 @@ object Main extends IOApp {
logger.info(s"\n${banner.render("***>")}") logger.info(s"\n${banner.render("***>")}")
pools.use(p => pools.use(p =>
RestServer.stream[IO](cfg, p.connectEC, p.clientEC, p.blocker).compile.drain.as(ExitCode.Success) RestServer
.stream[IO](cfg, p.connectEC, p.clientEC, p.blocker)
.compile
.drain
.as(ExitCode.Success)
) )
} }
case class Pools(connectEC: ExecutionContext, clientEC: ExecutionContext, blocker: Blocker) case class Pools(
connectEC: ExecutionContext,
clientEC: ExecutionContext,
blocker: Blocker
)
} }


@ -91,7 +91,8 @@ trait Conversions {
) )
def mkAttachment(item: OItem.ItemData)(ra: RAttachment, m: FileMeta): Attachment = { def mkAttachment(item: OItem.ItemData)(ra: RAttachment, m: FileMeta): Attachment = {
val converted = item.sources.find(_._1.id == ra.id).exists(_._2.checksum != m.checksum) val converted =
item.sources.find(_._1.id == ra.id).exists(_._2.checksum != m.checksum)
Attachment(ra.id, ra.name, m.length, MimeType.unsafe(m.mimetype.asString), converted) Attachment(ra.id, ra.name, m.length, MimeType.unsafe(m.mimetype.asString), converted)
} }
@ -107,7 +108,8 @@ trait Conversions {
OItem.Query( OItem.Query(
coll, coll,
m.name, m.name,
if (m.inbox) Seq(ItemState.Created) else Seq(ItemState.Created, ItemState.Confirmed), if (m.inbox) Seq(ItemState.Created)
else Seq(ItemState.Created, ItemState.Confirmed),
m.direction, m.direction,
m.corrPerson, m.corrPerson,
m.corrOrg, m.corrOrg,
@ -127,7 +129,8 @@ trait Conversions {
def mkGroup(g: (String, Vector[OItem.ListItem])): ItemLightGroup = def mkGroup(g: (String, Vector[OItem.ListItem])): ItemLightGroup =
ItemLightGroup(g._1, g._2.map(mkItemLight).toList) ItemLightGroup(g._1, g._2.map(mkItemLight).toList)
val gs = groups.map(mkGroup _).toList.sortWith((g1, g2) => g1.name.compareTo(g2.name) >= 0) val gs =
groups.map(mkGroup _).toList.sortWith((g1, g2) => g1.name.compareTo(g2.name) >= 0)
ItemLightList(gs) ItemLightList(gs)
} }
@ -203,13 +206,16 @@ trait Conversions {
val meta: F[(Boolean, UploadMeta)] = mp.parts val meta: F[(Boolean, UploadMeta)] = mp.parts
.find(_.name.exists(_.equalsIgnoreCase("meta"))) .find(_.name.exists(_.equalsIgnoreCase("meta")))
.map(p => parseMeta(p.body)) .map(p => parseMeta(p.body))
.map(fm => fm.map(m => (m.multiple, UploadMeta(m.direction, "webapp", validFileTypes)))) .map(fm =>
fm.map(m => (m.multiple, UploadMeta(m.direction, "webapp", validFileTypes)))
)
.getOrElse((true, UploadMeta(None, "webapp", validFileTypes)).pure[F]) .getOrElse((true, UploadMeta(None, "webapp", validFileTypes)).pure[F])
val files = mp.parts val files = mp.parts
.filter(p => p.name.forall(s => !s.equalsIgnoreCase("meta"))) .filter(p => p.name.forall(s => !s.equalsIgnoreCase("meta")))
.map(p => .map(p =>
OUpload.File(p.filename, p.headers.get(`Content-Type`).map(fromContentType), p.body) OUpload
.File(p.filename, p.headers.get(`Content-Type`).map(fromContentType), p.body)
) )
for { for {
metaData <- meta metaData <- meta
@ -252,7 +258,10 @@ trait Conversions {
} yield OOrganization.OrgAndContacts(org, cont) } yield OOrganization.OrgAndContacts(org, cont)
} }
def changeOrg[F[_]: Sync](v: Organization, cid: Ident): F[OOrganization.OrgAndContacts] = { def changeOrg[F[_]: Sync](
v: Organization,
cid: Ident
): F[OOrganization.OrgAndContacts] = {
def contacts(oid: Ident) = def contacts(oid: Ident) =
v.contacts.traverse(c => newContact(c, oid.some, None)) v.contacts.traverse(c => newContact(c, oid.some, None))
for { for {
@ -306,7 +315,10 @@ trait Conversions {
} yield OOrganization.PersonAndContacts(org, cont) } yield OOrganization.PersonAndContacts(org, cont)
} }
def changePerson[F[_]: Sync](v: Person, cid: Ident): F[OOrganization.PersonAndContacts] = { def changePerson[F[_]: Sync](
v: Person,
cid: Ident
): F[OOrganization.PersonAndContacts] = {
def contacts(pid: Ident) = def contacts(pid: Ident) =
v.contacts.traverse(c => newContact(c, None, pid.some)) v.contacts.traverse(c => newContact(c, None, pid.some))
for { for {
@ -330,7 +342,11 @@ trait Conversions {
def mkContact(rc: RContact): Contact = def mkContact(rc: RContact): Contact =
Contact(rc.contactId, rc.value, rc.kind) Contact(rc.contactId, rc.value, rc.kind)
def newContact[F[_]: Sync](c: Contact, oid: Option[Ident], pid: Option[Ident]): F[RContact] = def newContact[F[_]: Sync](
c: Contact,
oid: Option[Ident],
pid: Option[Ident]
): F[RContact] =
timeId.map { timeId.map {
case (id, now) => case (id, now) =>
RContact(id, c.value, c.kind, pid, oid, now) RContact(id, c.value, c.kind, pid, oid, now)
@ -395,7 +411,16 @@ trait Conversions {
}) })
def changeSource[F[_]: Sync](s: Source, coll: Ident): RSource = def changeSource[F[_]: Sync](s: Source, coll: Ident): RSource =
RSource(s.id, coll, s.abbrev, s.description, s.counter, s.enabled, s.priority, s.created) RSource(
s.id,
coll,
s.abbrev,
s.description,
s.counter,
s.enabled,
s.priority,
s.created
)
// equipment // equipment
def mkEquipment(re: REquipment): Equipment = def mkEquipment(re: REquipment): Equipment =
@ -422,7 +447,8 @@ trait Conversions {
case JobCancelResult.JobNotFound => BasicResult(false, "Job not found") case JobCancelResult.JobNotFound => BasicResult(false, "Job not found")
case JobCancelResult.CancelRequested => case JobCancelResult.CancelRequested =>
BasicResult(true, "Cancel was requested at the job executor") BasicResult(true, "Cancel was requested at the job executor")
case JobCancelResult.Removed => BasicResult(true, "The job has been removed from the queue.") case JobCancelResult.Removed =>
BasicResult(true, "The job has been removed from the queue.")
} }
def basicResult(ar: AddResult, successMsg: String): BasicResult = ar match { def basicResult(ar: AddResult, successMsg: String): BasicResult = ar match {
@ -439,7 +465,8 @@ trait Conversions {
def basicResult(cr: PassChangeResult): BasicResult = cr match { def basicResult(cr: PassChangeResult): BasicResult = cr match {
case PassChangeResult.Success => BasicResult(true, "Password changed.") case PassChangeResult.Success => BasicResult(true, "Password changed.")
case PassChangeResult.UpdateFailed => BasicResult(false, "The database update failed.") case PassChangeResult.UpdateFailed =>
BasicResult(false, "The database update failed.")
case PassChangeResult.PasswordMismatch => case PassChangeResult.PasswordMismatch =>
BasicResult(false, "The current password is incorrect.") BasicResult(false, "The current password is incorrect.")
case PassChangeResult.UserNotFound => BasicResult(false, "User not found.") case PassChangeResult.UserNotFound => BasicResult(false, "User not found.")
@ -448,7 +475,11 @@ trait Conversions {
// MIME Type // MIME Type
def fromContentType(header: `Content-Type`): MimeType = def fromContentType(header: `Content-Type`): MimeType =
MimeType(header.mediaType.mainType, header.mediaType.subType, header.mediaType.extensions) MimeType(
header.mediaType.mainType,
header.mediaType.subType,
header.mediaType.extensions
)
} }
object Conversions extends Conversions { object Conversions extends Conversions {


@ -23,7 +23,9 @@ object AttachmentRoutes {
val dsl = new Http4sDsl[F] {} val dsl = new Http4sDsl[F] {}
import dsl._ import dsl._
def withResponseHeaders(resp: F[Response[F]])(data: OItem.BinaryData[F]): F[Response[F]] = { def withResponseHeaders(
resp: F[Response[F]]
)(data: OItem.BinaryData[F]): F[Response[F]] = {
val mt = MediaType.unsafeParse(data.meta.mimetype.asString) val mt = MediaType.unsafeParse(data.meta.mimetype.asString)
val ctype = `Content-Type`(mt) val ctype = `Content-Type`(mt)
val cntLen: Header = `Content-Length`.unsafeFromLong(data.meta.length) val cntLen: Header = `Content-Length`.unsafeFromLong(data.meta.length)
@ -104,7 +106,6 @@ object AttachmentRoutes {
.getOrElse(NotFound(BasicResult(false, "Not found"))) .getOrElse(NotFound(BasicResult(false, "Not found")))
} yield resp } yield resp
case GET -> Root / Ident(id) / "view" => case GET -> Root / Ident(id) / "view" =>
// this route exists to provide a stable url // this route exists to provide a stable url
// it redirects currently to viewerjs // it redirects currently to viewerjs


@ -44,7 +44,8 @@ object CheckFileRoutes {
private def convert(v: Vector[RItem]): CheckFileResult = private def convert(v: Vector[RItem]): CheckFileResult =
CheckFileResult( CheckFileResult(
v.nonEmpty, v.nonEmpty,
v.map(r => BasicItem(r.id, r.name, r.direction, r.state, r.created, r.itemDate)).toList v.map(r => BasicItem(r.id, r.name, r.direction, r.state, r.created, r.itemDate))
.toList
) )
} }


@ -28,7 +28,8 @@ object CollectiveRoutes {
case req @ POST -> Root / "settings" => case req @ POST -> Root / "settings" =>
for { for {
settings <- req.as[CollectiveSettings] settings <- req.as[CollectiveSettings]
res <- backend.collective.updateLanguage(user.account.collective, settings.language) res <- backend.collective
.updateLanguage(user.account.collective, settings.language)
resp <- Ok(Conversions.basicResult(res, "Language updated.")) resp <- Ok(Conversions.basicResult(res, "Language updated."))
} yield resp } yield resp
@ -39,7 +40,8 @@ object CollectiveRoutes {
resp <- sett.toResponse() resp <- sett.toResponse()
} yield resp } yield resp
case GET -> Root / "contacts" :? QueryParam.QueryOpt(q) +& QueryParam.ContactKindOpt(kind) => case GET -> Root / "contacts" :? QueryParam.QueryOpt(q) +& QueryParam
.ContactKindOpt(kind) =>
for { for {
res <- backend.collective res <- backend.collective
.getContacts(user.account.collective, q.map(_.q), kind) .getContacts(user.account.collective, q.map(_.q), kind)


@ -36,7 +36,9 @@ object ItemRoutes {
for { for {
item <- backend.item.findItem(id, user.account.collective) item <- backend.item.findItem(id, user.account.collective)
result = item.map(Conversions.mkItemDetail) result = item.map(Conversions.mkItemDetail)
resp <- result.map(r => Ok(r)).getOrElse(NotFound(BasicResult(false, "Not found."))) resp <- result
.map(r => Ok(r))
.getOrElse(NotFound(BasicResult(false, "Not found.")))
} yield resp } yield resp
case POST -> Root / Ident(id) / "confirm" => case POST -> Root / Ident(id) / "confirm" =>
@ -103,7 +105,11 @@ object ItemRoutes {
case req @ POST -> Root / Ident(id) / "name" => case req @ POST -> Root / Ident(id) / "name" =>
for { for {
text <- req.as[OptionalText] text <- req.as[OptionalText]
res <- backend.item.setName(id, text.text.notEmpty.getOrElse(""), user.account.collective) res <- backend.item.setName(
id,
text.text.notEmpty.getOrElse(""),
user.account.collective
)
resp <- Ok(Conversions.basicResult(res, "Name updated")) resp <- Ok(Conversions.basicResult(res, "Name updated"))
} yield resp } yield resp


@ -40,7 +40,8 @@ object MailSendRoutes {
for { for {
rec <- s.recipients.traverse(MailAddress.parse) rec <- s.recipients.traverse(MailAddress.parse)
fileIds <- s.attachmentIds.traverse(Ident.fromString) fileIds <- s.attachmentIds.traverse(Ident.fromString)
sel = if (s.addAllAttachments) AttachSelection.All else AttachSelection.Selected(fileIds) sel = if (s.addAllAttachments) AttachSelection.All
else AttachSelection.Selected(fileIds)
} yield ItemMail(item, s.subject, rec, s.body, sel) } yield ItemMail(item, s.subject, rec, s.body, sel)
def convertOut(res: SendResult): BasicResult = def convertOut(res: SendResult): BasicResult =
@ -50,7 +51,10 @@ object MailSendRoutes {
case SendResult.SendFailure(ex) => case SendResult.SendFailure(ex) =>
BasicResult(false, s"Mail sending failed: ${ex.getMessage}") BasicResult(false, s"Mail sending failed: ${ex.getMessage}")
case SendResult.StoreFailure(ex) => case SendResult.StoreFailure(ex) =>
BasicResult(false, s"Mail was sent, but could not be store to database: ${ex.getMessage}") BasicResult(
false,
s"Mail was sent, but could not be store to database: ${ex.getMessage}"
)
case SendResult.NotFound => case SendResult.NotFound =>
BasicResult(false, s"There was no mail-connection or item found.") BasicResult(false, s"There was no mail-connection or item found.")
} }


@ -43,7 +43,9 @@ object MailSettingsRoutes {
(for { (for {
in <- OptionT.liftF(req.as[EmailSettings]) in <- OptionT.liftF(req.as[EmailSettings])
ru = makeSettings(in) ru = makeSettings(in)
up <- OptionT.liftF(ru.traverse(r => backend.mail.createSettings(user.account, r))) up <- OptionT.liftF(
ru.traverse(r => backend.mail.createSettings(user.account, r))
)
resp <- OptionT.liftF( resp <- OptionT.liftF(
Ok( Ok(
up.fold( up.fold(
@ -58,7 +60,9 @@ object MailSettingsRoutes {
(for { (for {
in <- OptionT.liftF(req.as[EmailSettings]) in <- OptionT.liftF(req.as[EmailSettings])
ru = makeSettings(in) ru = makeSettings(in)
up <- OptionT.liftF(ru.traverse(r => backend.mail.updateSettings(user.account, name, r))) up <- OptionT.liftF(
ru.traverse(r => backend.mail.updateSettings(user.account, name, r))
)
resp <- OptionT.liftF( resp <- OptionT.liftF(
Ok( Ok(
up.fold( up.fold(


@ -19,7 +19,11 @@ import org.log4s._
object UploadRoutes { object UploadRoutes {
private[this] val logger = getLogger private[this] val logger = getLogger
def secured[F[_]: Effect](backend: BackendApp[F], cfg: Config, user: AuthToken): HttpRoutes[F] = { def secured[F[_]: Effect](
backend: BackendApp[F],
cfg: Config,
user: AuthToken
): HttpRoutes[F] = {
val dsl = new Http4sDsl[F] with ResponseGenerator[F] {} val dsl = new Http4sDsl[F] with ResponseGenerator[F] {}
import dsl._ import dsl._
@ -51,7 +55,12 @@ object UploadRoutes {
case req @ POST -> Root / "item" / Ident(id) => case req @ POST -> Root / "item" / Ident(id) =>
for { for {
multipart <- req.as[Multipart[F]] multipart <- req.as[Multipart[F]]
updata <- readMultipart(multipart, logger, Priority.Low, cfg.backend.files.validMimeTypes) updata <- readMultipart(
multipart,
logger,
Priority.Low,
cfg.backend.files.validMimeTypes
)
result <- backend.upload.submit(updata, id) result <- backend.upload.submit(updata, id)
res <- Ok(basicResult(result)) res <- Ok(basicResult(result))
} yield res } yield res


@ -29,7 +29,9 @@ object TemplateRoutes {
def apply[F[_]: Effect](blocker: Blocker, cfg: Config)( def apply[F[_]: Effect](blocker: Blocker, cfg: Config)(
implicit C: ContextShift[F] implicit C: ContextShift[F]
): InnerRoutes[F] = { ): InnerRoutes[F] = {
val indexTemplate = memo(loadResource("/index.html").flatMap(loadTemplate(_, blocker))) val indexTemplate = memo(
loadResource("/index.html").flatMap(loadTemplate(_, blocker))
)
val docTemplate = memo(loadResource("/doc.html").flatMap(loadTemplate(_, blocker))) val docTemplate = memo(loadResource("/doc.html").flatMap(loadTemplate(_, blocker)))
val dsl = new Http4sDsl[F] {} val dsl = new Http4sDsl[F] {}
@ -62,7 +64,9 @@ object TemplateRoutes {
r.pure[F] r.pure[F]
} }
def loadUrl[F[_]: Sync](url: URL, blocker: Blocker)(implicit C: ContextShift[F]): F[String] = def loadUrl[F[_]: Sync](url: URL, blocker: Blocker)(
implicit C: ContextShift[F]
): F[String] =
Stream Stream
.bracket(Sync[F].delay(url.openStream))(in => Sync[F].delay(in.close())) .bracket(Sync[F].delay(url.openStream))(in => Sync[F].delay(in.close()))
.flatMap(in => io.readInputStream(in.pure[F], 64 * 1024, blocker, false)) .flatMap(in => io.readInputStream(in.pure[F], 64 * 1024, blocker, false))


@ -9,7 +9,9 @@ import org.http4s.server.staticcontent.WebjarService.{WebjarAsset, Config => Web
object WebjarRoutes { object WebjarRoutes {
def appRoutes[F[_]: Effect](blocker: Blocker)(implicit C: ContextShift[F]): HttpRoutes[F] = def appRoutes[F[_]: Effect](
blocker: Blocker
)(implicit C: ContextShift[F]): HttpRoutes[F] =
webjarService( webjarService(
WebjarConfig( WebjarConfig(
filter = assetFilter, filter = assetFilter,


@ -15,7 +15,10 @@ sealed trait AddResult {
object AddResult { object AddResult {
def fromUpdate(e: Either[Throwable, Int]): AddResult = def fromUpdate(e: Either[Throwable, Int]): AddResult =
e.fold(Failure, n => if (n > 0) Success else Failure(new Exception("No rows updated"))) e.fold(
Failure,
n => if (n > 0) Success else Failure(new Exception("No rows updated"))
)
case object Success extends AddResult { case object Success extends AddResult {
def toEither = Right(()) def toEither = Right(())
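
A small usage sketch of fromUpdate as defined above: any positive update count maps to Success, zero rows or a raised error maps to Failure (the values are illustrative):

    val ok: AddResult     = AddResult.fromUpdate(Right(1))                   // Success
    val noRows: AddResult = AddResult.fromUpdate(Right(0))                   // Failure (no rows updated)
    val failed: AddResult = AddResult.fromUpdate(Left(new Exception("db")))  // Failure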


@ -88,7 +88,6 @@ trait DoobieMeta extends EmilDoobieMeta {
implicit val metaLanguage: Meta[Language] = implicit val metaLanguage: Meta[Language] =
Meta[String].imap(Language.unsafe)(_.iso3) Meta[String].imap(Language.unsafe)(_.iso3)
implicit val metaCalEvent: Meta[CalEvent] = implicit val metaCalEvent: Meta[CalEvent] =
Meta[String].timap(CalEvent.unsafe)(_.asString) Meta[String].timap(CalEvent.unsafe)(_.asString)
} }


@ -27,7 +27,9 @@ trait DoobieSyntax {
and(f0 :: fs.toList) and(f0 :: fs.toList)
def or(fs: Seq[Fragment]): Fragment = def or(fs: Seq[Fragment]): Fragment =
Fragment.const(" (") ++ fs.reduce(_ ++ Fragment.const(" OR ") ++ _) ++ Fragment.const(") ") Fragment.const(" (") ++ fs.reduce(_ ++ Fragment.const(" OR ") ++ _) ++ Fragment.const(
") "
)
def or(f0: Fragment, fs: Fragment*): Fragment = def or(f0: Fragment, fs: Fragment*): Fragment =
or(f0 :: fs.toList) or(f0 :: fs.toList)
@ -42,7 +44,9 @@ trait DoobieSyntax {
fr"ORDER BY" ++ commas(c0 :: cs.toList) fr"ORDER BY" ++ commas(c0 :: cs.toList)
def updateRow(table: Fragment, where: Fragment, setter: Fragment): Fragment = def updateRow(table: Fragment, where: Fragment, setter: Fragment): Fragment =
Fragment.const("UPDATE ") ++ table ++ Fragment.const(" SET ") ++ setter ++ this.where(where) Fragment.const("UPDATE ") ++ table ++ Fragment.const(" SET ") ++ setter ++ this.where(
where
)
def insertRow(table: Fragment, cols: List[Column], vals: Fragment): Fragment = def insertRow(table: Fragment, cols: List[Column], vals: Fragment): Fragment =
Fragment.const("INSERT INTO ") ++ table ++ Fragment.const(" (") ++ Fragment.const("INSERT INTO ") ++ table ++ Fragment.const(" (") ++
@ -66,7 +70,8 @@ trait DoobieSyntax {
Fragment.const(") FROM ") ++ table ++ this.where(where) Fragment.const(") FROM ") ++ table ++ this.where(where)
def selectCount(col: Column, table: Fragment, where: Fragment): Fragment = def selectCount(col: Column, table: Fragment, where: Fragment): Fragment =
Fragment.const("SELECT COUNT(") ++ col.f ++ Fragment.const(") FROM ") ++ table ++ this.where( Fragment.const("SELECT COUNT(") ++ col.f ++ Fragment.const(") FROM ") ++ table ++ this
.where(
where where
) )
@ -74,7 +79,8 @@ trait DoobieSyntax {
fr"DELETE FROM" ++ table ++ this.where(where) fr"DELETE FROM" ++ table ++ this.where(where)
def withCTE(ps: (String, Fragment)*): Fragment = { def withCTE(ps: (String, Fragment)*): Fragment = {
val subsel: Seq[Fragment] = ps.map(p => Fragment.const(p._1) ++ fr"AS (" ++ p._2 ++ fr")") val subsel: Seq[Fragment] =
ps.map(p => Fragment.const(p._1) ++ fr"AS (" ++ p._2 ++ fr")")
fr"WITH" ++ commas(subsel) fr"WITH" ++ commas(subsel)
} }
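
withCTE, shown above, just renders a WITH clause from named sub-selects and can be prepended to any query fragment. A usage sketch, assuming the same Implicits import the record classes in this commit use (the SQL itself is illustrative):

    import doobie._
    import doobie.implicits._
    import docspell.store.impl.Implicits._ // assumed to bring withCTE and friends into scope

    // renders roughly: WITH recent AS (SELECT id FROM item WHERE state = 'created') SELECT count(*) FROM recent
    val recentItems: Fragment = fr"SELECT id FROM item WHERE state = 'created'"
    val query: Fragment       = withCTE("recent" -> recentItems) ++ fr"SELECT count(*) FROM recent"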


@ -9,9 +9,15 @@ import docspell.store.{AddResult, JdbcConfig, Store}
import doobie._ import doobie._
import doobie.implicits._ import doobie.implicits._
final class StoreImpl[F[_]: Effect](jdbc: JdbcConfig, xa: Transactor[F]) extends Store[F] { final class StoreImpl[F[_]: Effect](jdbc: JdbcConfig, xa: Transactor[F])
extends Store[F] {
val bitpeaceCfg = val bitpeaceCfg =
BitpeaceConfig("filemeta", "filechunk", TikaMimetypeDetect, Ident.randomId[F].map(_.id)) BitpeaceConfig(
"filemeta",
"filechunk",
TikaMimetypeDetect,
Ident.randomId[F].map(_.id)
)
def migrate: F[Int] = def migrate: F[Int] =
FlywayMigrate.run[F](jdbc) FlywayMigrate.run[F](jdbc)


@ -15,7 +15,9 @@ object FlywayMigrate {
val name = if (dbtype == "h2") "postgresql" else dbtype val name = if (dbtype == "h2") "postgresql" else dbtype
List(s"classpath:db/migration/${name}") List(s"classpath:db/migration/${name}")
case None => case None =>
logger.warn(s"Cannot read database name from jdbc url: ${jdbc.url}. Go with PostgreSQL") logger.warn(
s"Cannot read database name from jdbc url: ${jdbc.url}. Go with PostgreSQL"
)
List("classpath:db/postgresql") List("classpath:db/postgresql")
} }
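
To make the branch above concrete: H2 reuses the PostgreSQL migration scripts, any other recognised database name gets its own folder, and an unparsable JDBC url falls back to the PostgreSQL location. A self-contained sketch of just that decision (migrationLocations is an illustrative name, not the actual FlywayMigrate API):

    // dbtype is the database name parsed from the JDBC url, if any
    def migrationLocations(dbtype: Option[String]): List[String] =
      dbtype match {
        case Some(db) =>
          val name = if (db == "h2") "postgresql" else db
          List(s"classpath:db/migration/$name")
        case None =>
          List("classpath:db/postgresql")
      }

    migrationLocations(Some("h2"))   // List("classpath:db/migration/postgresql")
    migrationLocations(None)         // List("classpath:db/postgresql")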


@ -52,7 +52,7 @@ object QAttachment {
.foldMonoid .foldMonoid
} yield n + f } yield n + f
def deleteArchive[F[_]: Sync](store: Store[F])(attachId: Ident): F[Int] = { def deleteArchive[F[_]: Sync](store: Store[F])(attachId: Ident): F[Int] =
(for { (for {
aa <- OptionT(store.transact(RAttachmentArchive.findById(attachId))) aa <- OptionT(store.transact(RAttachmentArchive.findById(attachId)))
n <- OptionT.liftF(store.transact(RAttachmentArchive.deleteAll(aa.fileId))) n <- OptionT.liftF(store.transact(RAttachmentArchive.deleteAll(aa.fileId)))
@ -64,7 +64,6 @@ object QAttachment {
.drain .drain
) )
} yield n).getOrElse(0) } yield n).getOrElse(0)
}
def deleteItemAttachments[F[_]: Sync]( def deleteItemAttachments[F[_]: Sync](
store: Store[F] store: Store[F]


@ -10,7 +10,12 @@ import docspell.common.ContactKind
object QCollective { object QCollective {
case class InsightData(incoming: Int, outgoing: Int, bytes: Long, tags: Map[String, Int]) case class InsightData(
incoming: Int,
outgoing: Int,
bytes: Long,
tags: Map[String, Int]
)
def getInsights(coll: Ident): ConnectionIO[InsightData] = { def getInsights(coll: Ident): ConnectionIO[InsightData] = {
val IC = RItem.Columns val IC = RItem.Columns
@ -49,7 +54,9 @@ object QCollective {
fr"count(" ++ RC.itemId.prefix("r").f ++ fr")" fr"count(" ++ RC.itemId.prefix("r").f ++ fr")"
) ++ ) ++
fr"FROM" ++ RTagItem.table ++ fr"r" ++ fr"FROM" ++ RTagItem.table ++ fr"r" ++
fr"INNER JOIN" ++ RTag.table ++ fr"t ON" ++ RC.tagId.prefix("r").is(TC.tid.prefix("t")) ++ fr"INNER JOIN" ++ RTag.table ++ fr"t ON" ++ RC.tagId
.prefix("r")
.is(TC.tid.prefix("t")) ++
fr"WHERE" ++ TC.cid.prefix("t").is(coll) ++ fr"WHERE" ++ TC.cid.prefix("t").is(coll) ++
fr"GROUP BY" ++ TC.name.prefix("t").f fr"GROUP BY" ++ TC.name.prefix("t").f


@ -87,7 +87,9 @@ object QItem {
srcs <- sources srcs <- sources
arch <- archives arch <- archives
ts <- tags ts <- tags
} yield data.map(d => ItemData(d._1, d._2, d._3, d._4, d._5, d._6, ts, att, srcs, arch)) } yield data.map(d =>
ItemData(d._1, d._2, d._3, d._4, d._5, d._6, ts, att, srcs, arch)
)
} }
case class ListItem( case class ListItem(


@ -17,13 +17,19 @@ object QJob {
def takeNextJob[F[_]: Effect]( def takeNextJob[F[_]: Effect](
store: Store[F] store: Store[F]
)(priority: Ident => F[Priority], worker: Ident, retryPause: Duration): F[Option[RJob]] = )(
priority: Ident => F[Priority],
worker: Ident,
retryPause: Duration
): F[Option[RJob]] =
Stream Stream
.range(0, 10) .range(0, 10)
.evalMap(n => takeNextJob1(store)(priority, worker, retryPause, n)) .evalMap(n => takeNextJob1(store)(priority, worker, retryPause, n))
.evalTap { x => .evalTap { x =>
if (x.isLeft) if (x.isLeft)
logger.fdebug[F]("Cannot mark job, probably due to concurrent updates. Will retry.") logger.fdebug[F](
"Cannot mark job, probably due to concurrent updates. Will retry."
)
else ().pure[F] else ().pure[F]
} }
.find(_.isRight) .find(_.isRight)
@ -54,7 +60,9 @@ object QJob {
} yield if (n == 1) Right(job) else Left(())) } yield if (n == 1) Right(job) else Left(()))
for { for {
_ <- logger.ftrace[F](s"About to take next job (worker ${worker.id}), try $currentTry") _ <- logger.ftrace[F](
s"About to take next job (worker ${worker.id}), try $currentTry"
)
now <- Timestamp.current[F] now <- Timestamp.current[F]
group <- store.transact(selectNextGroup(worker, now, retryPause)) group <- store.transact(selectNextGroup(worker, now, retryPause))
_ <- logger.ftrace[F](s"Choose group ${group.map(_.id)}") _ <- logger.ftrace[F](s"Choose group ${group.map(_.id)}")
@ -66,7 +74,8 @@ object QJob {
_ <- logger.ftrace[F](s"Found job: ${job.map(_.info)}") _ <- logger.ftrace[F](s"Found job: ${job.map(_.info)}")
res <- job.traverse(j => markJob(j)) res <- job.traverse(j => markJob(j))
} yield res.map(_.map(_.some)).getOrElse { } yield res.map(_.map(_.some)).getOrElse {
if (group.isDefined) Left(()) // if a group was found, but no job someone else was faster if (group.isDefined)
Left(()) // if a group was found, but no job someone else was faster
else Right(None) else Right(None)
} }
} }
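
The comments in these hunks describe an optimistic claim: marking a job can fail when a concurrent worker updates the same row first, in which case the caller simply retries (up to ten attempts), stopping at the first successful claim or a definite "no job". A stripped-down sketch of that loop shape (types simplified, not the actual QJob signatures):

    import cats.effect.IO
    import fs2.Stream

    // attempt(n) yields Left(()) when another worker won the race,
    // Right(Some(job)) on a successful claim, Right(None) when nothing is queued
    def claimWithRetry(attempt: Int => IO[Either[Unit, Option[String]]]): IO[Option[String]] =
      Stream
        .range(0, 10)
        .evalMap(attempt)
        .find(_.isRight) // stop at the first non-conflicting outcome
        .compile
        .last
        .map(_.flatMap(_.toOption).flatten)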
@ -103,7 +112,9 @@ object QJob {
union union
.query[Ident] .query[Ident]
.to[List] .to[List]
.map(_.headOption) // either one or two results, but may be empty if RJob table is empty .map(
_.headOption
) // either one or two results, but may be empty if RJob table is empty
} }
def selectNextJob( def selectNextJob(
@ -119,7 +130,8 @@ object QJob {
val waiting: JobState = JobState.Waiting val waiting: JobState = JobState.Waiting
val stuck: JobState = JobState.Stuck val stuck: JobState = JobState.Stuck
val stuckTrigger = coalesce(JC.startedmillis.f, sql"${now.toMillis}") ++ fr"+" ++ power2( val stuckTrigger =
coalesce(JC.startedmillis.f, sql"${now.toMillis}") ++ fr"+" ++ power2(
JC.retries JC.retries
) ++ fr"* ${initialPause.millis}" ) ++ fr"* ${initialPause.millis}"
val sql = selectSimple( val sql = selectSimple(
@ -127,7 +139,10 @@ object QJob {
RJob.table, RJob.table,
and( and(
JC.group.is(group), JC.group.is(group),
or(JC.state.is(waiting), and(JC.state.is(stuck), stuckTrigger ++ fr"< ${now.toMillis}")) or(
JC.state.is(waiting),
and(JC.state.is(stuck), stuckTrigger ++ fr"< ${now.toMillis}")
)
) )
) ++ ) ++
orderBy(JC.state.asc, psort, JC.submitted.asc) ++ orderBy(JC.state.asc, psort, JC.submitted.asc) ++
@ -189,7 +204,9 @@ object QJob {
def findAll[F[_]: Effect](ids: Seq[Ident], store: Store[F]): F[Vector[RJob]] = def findAll[F[_]: Effect](ids: Seq[Ident], store: Store[F]): F[Vector[RJob]] =
store.transact(RJob.findFromIds(ids)) store.transact(RJob.findFromIds(ids))
def queueStateSnapshot(collective: Ident): Stream[ConnectionIO, (RJob, Vector[RJobLog])] = { def queueStateSnapshot(
collective: Ident
): Stream[ConnectionIO, (RJob, Vector[RJobLog])] = {
val JC = RJob.Columns val JC = RJob.Columns
val waiting: Set[JobState] = Set(JobState.Waiting, JobState.Stuck, JobState.Scheduled) val waiting: Set[JobState] = Set(JobState.Waiting, JobState.Stuck, JobState.Scheduled)
val running: Set[JobState] = Set(JobState.Running) val running: Set[JobState] = Set(JobState.Running)


@ -15,7 +15,11 @@ trait JobQueue[F[_]] {
def insertAll(jobs: Seq[RJob]): F[Unit] def insertAll(jobs: Seq[RJob]): F[Unit]
def nextJob(prio: Ident => F[Priority], worker: Ident, retryPause: Duration): F[Option[RJob]] def nextJob(
prio: Ident => F[Priority],
worker: Ident,
retryPause: Duration
): F[Option[RJob]]
} }
object JobQueue { object JobQueue {
@ -29,14 +33,16 @@ object JobQueue {
worker: Ident, worker: Ident,
retryPause: Duration retryPause: Duration
): F[Option[RJob]] = ): F[Option[RJob]] =
logger.ftrace("Select next job") *> QJob.takeNextJob(store)(prio, worker, retryPause) logger
.ftrace("Select next job") *> QJob.takeNextJob(store)(prio, worker, retryPause)
def insert(job: RJob): F[Unit] = def insert(job: RJob): F[Unit] =
store store
.transact(RJob.insert(job)) .transact(RJob.insert(job))
.flatMap { n => .flatMap { n =>
if (n != 1) if (n != 1)
Effect[F].raiseError(new Exception(s"Inserting job failed. Update count: $n")) Effect[F]
.raiseError(new Exception(s"Inserting job failed. Update count: $n"))
else ().pure[F] else ().pure[F]
} }


@ -10,7 +10,6 @@ object Marked {
final case object NotMarkable extends Marked[Nothing] final case object NotMarkable extends Marked[Nothing]
def found[A](v: A): Marked[A] = Found(v) def found[A](v: A): Marked[A] = Found(v)
def notFound[A]: Marked[A] = NotFound def notFound[A]: Marked[A] = NotFound
def notMarkable[A]: Marked[A] = NotMarkable def notMarkable[A]: Marked[A] = NotMarkable


@ -38,7 +38,11 @@ object RAttachment {
fr"${v.id},${v.itemId},${v.fileId.id},${v.position},${v.created},${v.name}" fr"${v.id},${v.itemId},${v.fileId.id},${v.position},${v.created},${v.name}"
).update.run ).update.run
def updateFileIdAndName(attachId: Ident, fId: Ident, fname: Option[String]): ConnectionIO[Int] = def updateFileIdAndName(
attachId: Ident,
fId: Ident,
fname: Option[String]
): ConnectionIO[Int] =
updateRow(table, id.is(attachId), commas(fileId.setTo(fId), name.setTo(fname))).update.run updateRow(table, id.is(attachId), commas(fileId.setTo(fId), name.setTo(fname))).update.run
def updatePosition(attachId: Ident, pos: Int): ConnectionIO[Int] = def updatePosition(attachId: Ident, pos: Int): ConnectionIO[Int] =
@ -55,13 +59,17 @@ object RAttachment {
val aFileMeta = fileId.prefix("a") val aFileMeta = fileId.prefix("a")
val mId = RFileMeta.Columns.id.prefix("m") val mId = RFileMeta.Columns.id.prefix("m")
val from = table ++ fr"a INNER JOIN" ++ RFileMeta.table ++ fr"m ON" ++ aFileMeta.is(mId) val from =
table ++ fr"a INNER JOIN" ++ RFileMeta.table ++ fr"m ON" ++ aFileMeta.is(mId)
val cond = aId.is(attachId) val cond = aId.is(attachId)
selectSimple(cols, from, cond).query[FileMeta].option selectSimple(cols, from, cond).query[FileMeta].option
} }
def findByIdAndCollective(attachId: Ident, collective: Ident): ConnectionIO[Option[RAttachment]] = def findByIdAndCollective(
attachId: Ident,
collective: Ident
): ConnectionIO[Option[RAttachment]] =
selectSimple( selectSimple(
all.map(_.prefix("a")), all.map(_.prefix("a")),
table ++ fr"a," ++ RItem.table ++ fr"i", table ++ fr"a," ++ RItem.table ++ fr"i",
@ -75,7 +83,10 @@ object RAttachment {
def findByItem(id: Ident): ConnectionIO[Vector[RAttachment]] = def findByItem(id: Ident): ConnectionIO[Vector[RAttachment]] =
selectSimple(all, table, itemId.is(id)).query[RAttachment].to[Vector] selectSimple(all, table, itemId.is(id)).query[RAttachment].to[Vector]
def findByItemAndCollective(id: Ident, coll: Ident): ConnectionIO[Vector[RAttachment]] = { def findByItemAndCollective(
id: Ident,
coll: Ident
): ConnectionIO[Vector[RAttachment]] = {
val q = selectSimple(all.map(_.prefix("a")), table ++ fr"a", Fragment.empty) ++ val q = selectSimple(all.map(_.prefix("a")), table ++ fr"a", Fragment.empty) ++
fr"INNER JOIN" ++ RItem.table ++ fr"i ON" ++ RItem.Columns.id fr"INNER JOIN" ++ RItem.table ++ fr"i ON" ++ RItem.Columns.id
.prefix("i") .prefix("i")
@ -97,7 +108,8 @@ object RAttachment {
val iId = RItem.Columns.id.prefix("i") val iId = RItem.Columns.id.prefix("i")
val iColl = RItem.Columns.cid.prefix("i") val iColl = RItem.Columns.cid.prefix("i")
val from = table ++ fr"a INNER JOIN" ++ RFileMeta.table ++ fr"m ON" ++ afileMeta.is(mId) ++ val from =
table ++ fr"a INNER JOIN" ++ RFileMeta.table ++ fr"m ON" ++ afileMeta.is(mId) ++
fr"INNER JOIN" ++ RItem.table ++ fr"i ON" ++ aItem.is(iId) fr"INNER JOIN" ++ RItem.table ++ fr"i ON" ++ aItem.is(iId)
val cond = Seq(aItem.is(id), iColl.is(coll)) val cond = Seq(aItem.is(id), iColl.is(coll))


@ -20,7 +20,8 @@ case class RAttachmentMeta(
} }
object RAttachmentMeta { object RAttachmentMeta {
def empty(attachId: Ident) = RAttachmentMeta(attachId, None, Nil, MetaProposalList.empty) def empty(attachId: Ident) =
RAttachmentMeta(attachId, None, Nil, MetaProposalList.empty)
val table = fr"attachmentmeta" val table = fr"attachmentmeta"


@ -63,7 +63,9 @@ object RAttachmentSource {
selectSimple(all.map(_.prefix("a")), from, where).query[RAttachmentSource].option selectSimple(all.map(_.prefix("a")), from, where).query[RAttachmentSource].option
} }
def findByItemWithMeta(id: Ident): ConnectionIO[Vector[(RAttachmentSource, FileMeta)]] = { def findByItemWithMeta(
id: Ident
): ConnectionIO[Vector[(RAttachmentSource, FileMeta)]] = {
import bitpeace.sql._ import bitpeace.sql._
val aId = Columns.id.prefix("a") val aId = Columns.id.prefix("a")


@ -7,7 +7,12 @@ import doobie._
import doobie.implicits._ import doobie.implicits._
import fs2.Stream import fs2.Stream
case class RCollective(id: Ident, state: CollectiveState, language: Language, created: Timestamp) case class RCollective(
id: Ident,
state: CollectiveState,
language: Language,
created: Timestamp
)
object RCollective { object RCollective {


@ -40,7 +40,9 @@ object RInvitation {
deleteFrom(table, id.is(invite)).update.run deleteFrom(table, id.is(invite)).update.run
def useInvite(invite: Ident, minCreated: Timestamp): ConnectionIO[Boolean] = { def useInvite(invite: Ident, minCreated: Timestamp): ConnectionIO[Boolean] = {
val get = selectCount(id, table, and(id.is(invite), created.isGt(minCreated))).query[Int].unique val get = selectCount(id, table, and(id.is(invite), created.isGt(minCreated)))
.query[Int]
.unique
for { for {
inv <- get inv <- get
_ <- delete(invite) _ <- delete(invite)


@ -113,7 +113,11 @@ object RItem {
def updateState(itemId: Ident, itemState: ItemState): ConnectionIO[Int] = def updateState(itemId: Ident, itemState: ItemState): ConnectionIO[Int] =
for { for {
t <- currentTime t <- currentTime
n <- updateRow(table, id.is(itemId), commas(state.setTo(itemState), updated.setTo(t))).update.run n <- updateRow(
table,
id.is(itemId),
commas(state.setTo(itemState), updated.setTo(t))
).update.run
} yield n } yield n
def updateStateForCollective( def updateStateForCollective(
@ -160,7 +164,11 @@ object RItem {
).update.run ).update.run
} yield n } yield n
def updateCorrPerson(itemId: Ident, coll: Ident, person: Option[Ident]): ConnectionIO[Int] = def updateCorrPerson(
itemId: Ident,
coll: Ident,
person: Option[Ident]
): ConnectionIO[Int] =
for { for {
t <- currentTime t <- currentTime
n <- updateRow( n <- updateRow(
@ -180,7 +188,11 @@ object RItem {
).update.run ).update.run
} yield n } yield n
def updateConcPerson(itemId: Ident, coll: Ident, person: Option[Ident]): ConnectionIO[Int] = def updateConcPerson(
itemId: Ident,
coll: Ident,
person: Option[Ident]
): ConnectionIO[Int] =
for { for {
t <- currentTime t <- currentTime
n <- updateRow( n <- updateRow(
@ -200,7 +212,11 @@ object RItem {
).update.run ).update.run
} yield n } yield n
def updateConcEquip(itemId: Ident, coll: Ident, equip: Option[Ident]): ConnectionIO[Int] = def updateConcEquip(
itemId: Ident,
coll: Ident,
equip: Option[Ident]
): ConnectionIO[Int] =
for { for {
t <- currentTime t <- currentTime
n <- updateRow( n <- updateRow(
@ -250,7 +266,11 @@ object RItem {
).update.run ).update.run
} yield n } yield n
def updateDueDate(itemId: Ident, coll: Ident, date: Option[Timestamp]): ConnectionIO[Int] = def updateDueDate(
itemId: Ident,
coll: Ident,
date: Option[Timestamp]
): ConnectionIO[Int] =
for { for {
t <- currentTime t <- currentTime
n <- updateRow( n <- updateRow(


@ -6,7 +6,13 @@ import docspell.common._
import docspell.store.impl.Column import docspell.store.impl.Column
import docspell.store.impl.Implicits._ import docspell.store.impl.Implicits._
case class RJobLog(id: Ident, jobId: Ident, level: LogLevel, created: Timestamp, message: String) {} case class RJobLog(
id: Ident,
jobId: Ident,
level: LogLevel,
created: Timestamp,
message: String
) {}
object RJobLog { object RJobLog {
@ -26,7 +32,9 @@ object RJobLog {
insertRow(table, all, fr"${v.id},${v.jobId},${v.level},${v.created},${v.message}").update.run insertRow(table, all, fr"${v.id},${v.jobId},${v.level},${v.created},${v.message}").update.run
def findLogs(id: Ident): ConnectionIO[Vector[RJobLog]] = def findLogs(id: Ident): ConnectionIO[Vector[RJobLog]] =
(selectSimple(all, table, jobId.is(id)) ++ orderBy(created.asc)).query[RJobLog].to[Vector] (selectSimple(all, table, jobId.is(id)) ++ orderBy(created.asc))
.query[RJobLog]
.to[Vector]
def deleteAll(job: Ident): ConnectionIO[Int] = def deleteAll(job: Ident): ConnectionIO[Int] =
deleteFrom(table, jobId.is(job)).update.run deleteFrom(table, jobId.is(job)).update.run


@ -68,7 +68,10 @@ object ROrganization {
} }
def existsByName(coll: Ident, oname: String): ConnectionIO[Boolean] = def existsByName(coll: Ident, oname: String): ConnectionIO[Boolean] =
selectCount(oid, table, and(cid.is(coll), name.is(oname))).query[Int].unique.map(_ > 0) selectCount(oid, table, and(cid.is(coll), name.is(oname)))
.query[Int]
.unique
.map(_ > 0)
def findById(id: Ident): ConnectionIO[Option[ROrganization]] = { def findById(id: Ident): ConnectionIO[Option[ROrganization]] = {
val sql = selectSimple(all, table, cid.is(id)) val sql = selectSimple(all, table, cid.is(id))
@ -93,7 +96,9 @@ object ROrganization {
val CC = RContact.Columns val CC = RContact.Columns
val q = fr"SELECT DISTINCT" ++ commas(oid.prefix("o").f, name.prefix("o").f) ++ val q = fr"SELECT DISTINCT" ++ commas(oid.prefix("o").f, name.prefix("o").f) ++
fr"FROM" ++ table ++ fr"o" ++ fr"FROM" ++ table ++ fr"o" ++
fr"INNER JOIN" ++ RContact.table ++ fr"c ON" ++ CC.orgId.prefix("c").is(oid.prefix("o")) ++ fr"INNER JOIN" ++ RContact.table ++ fr"c ON" ++ CC.orgId
.prefix("c")
.is(oid.prefix("o")) ++
fr"WHERE" ++ and( fr"WHERE" ++ and(
cid.prefix("o").is(coll), cid.prefix("o").is(coll),
CC.kind.prefix("c").is(contactKind), CC.kind.prefix("c").is(contactKind),
@ -103,7 +108,10 @@ object ROrganization {
q.query[IdRef].to[Vector] q.query[IdRef].to[Vector]
} }
def findAll(coll: Ident, order: Columns.type => Column): Stream[ConnectionIO, ROrganization] = { def findAll(
coll: Ident,
order: Columns.type => Column
): Stream[ConnectionIO, ROrganization] = {
val sql = selectSimple(all, table, cid.is(coll)) ++ orderBy(order(Columns).f) val sql = selectSimple(all, table, cid.is(coll)) ++ orderBy(order(Columns).f)
sql.query[ROrganization].stream sql.query[ROrganization].stream
} }


@ -71,7 +71,10 @@ object RPerson {
} }
def existsByName(coll: Ident, pname: String): ConnectionIO[Boolean] = def existsByName(coll: Ident, pname: String): ConnectionIO[Boolean] =
selectCount(pid, table, and(cid.is(coll), name.is(pname))).query[Int].unique.map(_ > 0) selectCount(pid, table, and(cid.is(coll), name.is(pname)))
.query[Int]
.unique
.map(_ > 0)
def findById(id: Ident): ConnectionIO[Option[RPerson]] = { def findById(id: Ident): ConnectionIO[Option[RPerson]] = {
val sql = selectSimple(all, table, cid.is(id)) val sql = selectSimple(all, table, cid.is(id))
@ -103,7 +106,9 @@ object RPerson {
val CC = RContact.Columns val CC = RContact.Columns
val q = fr"SELECT DISTINCT" ++ commas(pid.prefix("p").f, name.prefix("p").f) ++ val q = fr"SELECT DISTINCT" ++ commas(pid.prefix("p").f, name.prefix("p").f) ++
fr"FROM" ++ table ++ fr"p" ++ fr"FROM" ++ table ++ fr"p" ++
fr"INNER JOIN" ++ RContact.table ++ fr"c ON" ++ CC.personId.prefix("c").is(pid.prefix("p")) ++ fr"INNER JOIN" ++ RContact.table ++ fr"c ON" ++ CC.personId
.prefix("c")
.is(pid.prefix("p")) ++
fr"WHERE" ++ and( fr"WHERE" ++ and(
cid.prefix("p").is(coll), cid.prefix("p").is(coll),
CC.kind.prefix("c").is(contactKind), CC.kind.prefix("c").is(contactKind),
@ -114,7 +119,10 @@ object RPerson {
q.query[IdRef].to[Vector] q.query[IdRef].to[Vector]
} }
def findAll(coll: Ident, order: Columns.type => Column): Stream[ConnectionIO, RPerson] = { def findAll(
coll: Ident,
order: Columns.type => Column
): Stream[ConnectionIO, RPerson] = {
val sql = selectSimple(all, table, cid.is(coll)) ++ orderBy(order(Columns).f) val sql = selectSimple(all, table, cid.is(coll)) ++ orderBy(order(Columns).f)
sql.query[RPerson].stream sql.query[RPerson].stream
} }


@ -37,7 +37,17 @@ object RSentMail {
for { for {
id <- Ident.randomId[F] id <- Ident.randomId[F]
now <- Timestamp.current[F] now <- Timestamp.current[F]
} yield RSentMail(id, uid, messageId, sender, connName, subject, recipients, body, now) } yield RSentMail(
id,
uid,
messageId,
sender,
connName,
subject,
recipients,
body,
now
)
def forItem( def forItem(
itemId: Ident, itemId: Ident,


@ -86,7 +86,10 @@ object RSource {
def findCollective(sourceId: Ident): ConnectionIO[Option[Ident]] = def findCollective(sourceId: Ident): ConnectionIO[Option[Ident]] =
selectSimple(List(cid), table, sid.is(sourceId)).query[Ident].option selectSimple(List(cid), table, sid.is(sourceId)).query[Ident].option
def findAll(coll: Ident, order: Columns.type => Column): ConnectionIO[Vector[RSource]] = { def findAll(
coll: Ident,
order: Columns.type => Column
): ConnectionIO[Vector[RSource]] = {
val sql = selectSimple(all, table, cid.is(coll)) ++ orderBy(order(Columns).f) val sql = selectSimple(all, table, cid.is(coll)) ++ orderBy(order(Columns).f)
sql.query[RSource].to[Vector] sql.query[RSource].to[Vector]
} }


@ -29,7 +29,11 @@ object RTag {
def insert(v: RTag): ConnectionIO[Int] = { def insert(v: RTag): ConnectionIO[Int] = {
val sql = val sql =
insertRow(table, all, fr"${v.tagId},${v.collective},${v.name},${v.category},${v.created}") insertRow(
table,
all,
fr"${v.tagId},${v.collective},${v.name},${v.category},${v.created}"
)
sql.update.run sql.update.run
} }


@ -32,7 +32,8 @@ object RUser {
val lastLogin = Column("lastlogin") val lastLogin = Column("lastlogin")
val created = Column("created") val created = Column("created")
val all = List(uid, login, cid, password, state, email, loginCount, lastLogin, created) val all =
List(uid, login, cid, password, state, email, loginCount, lastLogin, created)
} }
import Columns._ import Columns._


@ -178,7 +178,8 @@ object RUserEmail {
case None => Seq.empty case None => Seq.empty
}) })
(selectSimple(all.map(_.prefix("m")), from, and(cond)) ++ orderBy(mName.f)).query[RUserEmail] (selectSimple(all.map(_.prefix("m")), from, and(cond)) ++ orderBy(mName.f))
.query[RUserEmail]
} }
def findByAccount( def findByAccount(
@ -198,7 +199,8 @@ object RUserEmail {
deleteFrom( deleteFrom(
table, table,
fr"uid in (" ++ selectSimple(Seq(uId), RUser.table, and(cond)) ++ fr") AND" ++ name.is( fr"uid in (" ++ selectSimple(Seq(uId), RUser.table, and(cond)) ++ fr") AND" ++ name
.is(
connName connName
) )
).update.run ).update.run
@ -208,5 +210,8 @@ object RUserEmail {
getByName(accId, name).map(_.isDefined) getByName(accId, name).map(_.isDefined)
def exists(userId: Ident, connName: Ident): ConnectionIO[Boolean] = def exists(userId: Ident, connName: Ident): ConnectionIO[Boolean] =
selectCount(id, table, and(uid.is(userId), name.is(connName))).query[Int].unique.map(_ > 0) selectCount(id, table, and(uid.is(userId), name.is(connName)))
.query[Int]
.unique
.map(_ > 0)
} }