Apply scalafmt to all files

author Eike Kettner 2019-12-30 21:44:13 +01:00
parent 57e274e2b0
commit fc3e22e399
133 changed files with 3003 additions and 2112 deletions
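
The repository's scalafmt configuration is not included in this diff, so the exact settings behind these changes are unknown. The sketch below is a hypothetical .scalafmt.conf that would produce formatting of the kind seen in the hunks that follow (lines kept under roughly 100 characters, method chains broken onto leading-dot lines, long parameter lists split one per line); every value in it is an assumption, not something taken from the commit.

  version = "2.3.2"  # assumed; any scalafmt 2.x release from around this date formats similarly
  maxColumn = 100    # assumed from the reformatted lines below, which stay under roughly 100 characters

With a configuration like this in place, the sbt-scalafmt plugin's scalafmtAll task rewrites every source file in one pass, which is presumably how a commit of this size was produced; scalafmtCheckAll verifies the formatting without changing any files.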

View File

@ -27,38 +27,44 @@ trait BackendApp[F[_]] {
object BackendApp {
def create[F[_]: ConcurrentEffect](cfg: Config, store: Store[F], httpClientEc: ExecutionContext): Resource[F, BackendApp[F]] =
def create[F[_]: ConcurrentEffect](
cfg: Config,
store: Store[F],
httpClientEc: ExecutionContext
): Resource[F, BackendApp[F]] =
for {
queue <- JobQueue(store)
loginImpl <- Login[F](store)
signupImpl <- OSignup[F](store)
collImpl <- OCollective[F](store)
sourceImpl <- OSource[F](store)
tagImpl <- OTag[F](store)
equipImpl <- OEquipment[F](store)
orgImpl <- OOrganization(store)
uploadImpl <- OUpload(store, queue, cfg, httpClientEc)
nodeImpl <- ONode(store)
jobImpl <- OJob(store, httpClientEc)
itemImpl <- OItem(store)
queue <- JobQueue(store)
loginImpl <- Login[F](store)
signupImpl <- OSignup[F](store)
collImpl <- OCollective[F](store)
sourceImpl <- OSource[F](store)
tagImpl <- OTag[F](store)
equipImpl <- OEquipment[F](store)
orgImpl <- OOrganization(store)
uploadImpl <- OUpload(store, queue, cfg, httpClientEc)
nodeImpl <- ONode(store)
jobImpl <- OJob(store, httpClientEc)
itemImpl <- OItem(store)
} yield new BackendApp[F] {
val login: Login[F] = loginImpl
val signup: OSignup[F] = signupImpl
val login: Login[F] = loginImpl
val signup: OSignup[F] = signupImpl
val collective: OCollective[F] = collImpl
val source = sourceImpl
val tag = tagImpl
val equipment = equipImpl
val organization = orgImpl
val upload = uploadImpl
val node = nodeImpl
val job = jobImpl
val item = itemImpl
val source = sourceImpl
val tag = tagImpl
val equipment = equipImpl
val organization = orgImpl
val upload = uploadImpl
val node = nodeImpl
val job = jobImpl
val item = itemImpl
}
def apply[F[_]: ConcurrentEffect: ContextShift](cfg: Config
, connectEC: ExecutionContext
, httpClientEc: ExecutionContext
, blocker: Blocker): Resource[F, BackendApp[F]] =
def apply[F[_]: ConcurrentEffect: ContextShift](
cfg: Config,
connectEC: ExecutionContext,
httpClientEc: ExecutionContext,
blocker: Blocker
): Resource[F, BackendApp[F]] =
for {
store <- Store.create(cfg.jdbc, connectEC, blocker)
backend <- create(cfg, store, httpClientEc)

View File

@ -4,13 +4,9 @@ import docspell.backend.signup.{Config => SignupConfig}
import docspell.common.MimeType
import docspell.store.JdbcConfig
case class Config( jdbc: JdbcConfig
, signup: SignupConfig
, files: Config.Files) {
}
case class Config(jdbc: JdbcConfig, signup: SignupConfig, files: Config.Files) {}
object Config {
case class Files(chunkSize: Int, validMimeTypes: Seq[MimeType])
}
}

View File

@ -50,14 +50,13 @@ object AuthToken {
Left("Invalid authenticator")
}
def user[F[_]: Sync](accountId: AccountId, key: ByteVector): F[AuthToken] = {
def user[F[_]: Sync](accountId: AccountId, key: ByteVector): F[AuthToken] =
for {
salt <- Common.genSaltString[F]
millis = Instant.now.toEpochMilli
cd = AuthToken(millis, accountId, salt, "")
sig = sign(cd, key)
cd = AuthToken(millis, accountId, salt, "")
sig = sign(cd, key)
} yield cd.copy(sig = sig)
}
private def sign(cd: AuthToken, key: ByteVector): String = {
val raw = cd.millis.toString + cd.account.asString + cd.salt

View File

@ -45,45 +45,45 @@ object Login {
}
def ok(session: AuthToken): Result = Ok(session)
def invalidAuth: Result = InvalidAuth
def invalidTime: Result = InvalidTime
def invalidAuth: Result = InvalidAuth
def invalidTime: Result = InvalidTime
}
def apply[F[_]: Effect](store: Store[F]): Resource[F, Login[F]] = Resource.pure(new Login[F] {
def apply[F[_]: Effect](store: Store[F]): Resource[F, Login[F]] =
Resource.pure(new Login[F] {
def loginSession(config: Config)(sessionKey: String): F[Result] =
AuthToken.fromString(sessionKey) match {
case Right(at) =>
if (at.sigInvalid(config.serverSecret)) Result.invalidAuth.pure[F]
else if (at.isExpired(config.sessionValid)) Result.invalidTime.pure[F]
else Result.ok(at).pure[F]
case Left(_) =>
Result.invalidAuth.pure[F]
def loginSession(config: Config)(sessionKey: String): F[Result] =
AuthToken.fromString(sessionKey) match {
case Right(at) =>
if (at.sigInvalid(config.serverSecret)) Result.invalidAuth.pure[F]
else if (at.isExpired(config.sessionValid)) Result.invalidTime.pure[F]
else Result.ok(at).pure[F]
case Left(_) =>
Result.invalidAuth.pure[F]
}
def loginUserPass(config: Config)(up: UserPass): F[Result] =
AccountId.parse(up.user) match {
case Right(acc) =>
val okResult =
store.transact(RUser.updateLogin(acc)) *>
AuthToken.user(acc, config.serverSecret).map(Result.ok)
for {
data <- store.transact(QLogin.findUser(acc))
_ <- Sync[F].delay(logger.trace(s"Account lookup: $data"))
res <- if (data.exists(check(up.pass))) okResult
else Result.invalidAuth.pure[F]
} yield res
case Left(_) =>
Result.invalidAuth.pure[F]
}
private def check(given: String)(data: QLogin.Data): Boolean = {
val collOk = data.collectiveState == CollectiveState.Active ||
data.collectiveState == CollectiveState.ReadOnly
val userOk = data.userState == UserState.Active
val passOk = BCrypt.checkpw(given, data.password.pass)
collOk && userOk && passOk
}
def loginUserPass(config: Config)(up: UserPass): F[Result] = {
AccountId.parse(up.user) match {
case Right(acc) =>
val okResult=
store.transact(RUser.updateLogin(acc)) *>
AuthToken.user(acc, config.serverSecret).map(Result.ok)
for {
data <- store.transact(QLogin.findUser(acc))
_ <- Sync[F].delay(logger.trace(s"Account lookup: $data"))
res <- if (data.exists(check(up.pass))) okResult
else Result.invalidAuth.pure[F]
} yield res
case Left(_) =>
Result.invalidAuth.pure[F]
}
}
private def check(given: String)(data: QLogin.Data): Boolean = {
val collOk = data.collectiveState == CollectiveState.Active ||
data.collectiveState == CollectiveState.ReadOnly
val userOk = data.userState == UserState.Active
val passOk = BCrypt.checkpw(given, data.password.pass)
collOk && userOk && passOk
}
})
})
}

View File

@ -25,7 +25,11 @@ trait OCollective[F[_]] {
def insights(collective: Ident): F[InsightData]
def changePassword(accountId: AccountId, current: Password, newPass: Password): F[PassChangeResult]
def changePassword(
accountId: AccountId,
current: Password,
newPass: Password
): F[PassChangeResult]
}
object OCollective {
@ -35,15 +39,15 @@ object OCollective {
sealed trait PassChangeResult
object PassChangeResult {
case object UserNotFound extends PassChangeResult
case object UserNotFound extends PassChangeResult
case object PasswordMismatch extends PassChangeResult
case object UpdateFailed extends PassChangeResult
case object Success extends PassChangeResult
case object UpdateFailed extends PassChangeResult
case object Success extends PassChangeResult
def userNotFound: PassChangeResult = UserNotFound
def userNotFound: PassChangeResult = UserNotFound
def passwordMismatch: PassChangeResult = PasswordMismatch
def success: PassChangeResult = Success
def updateFailed: PassChangeResult = UpdateFailed
def success: PassChangeResult = Success
def updateFailed: PassChangeResult = UpdateFailed
}
case class RegisterData(collName: Ident, login: Ident, password: Password, invite: Option[Ident])
@ -63,39 +67,47 @@ object OCollective {
}
}
def apply[F[_]:Effect](store: Store[F]): Resource[F, OCollective[F]] =
def apply[F[_]: Effect](store: Store[F]): Resource[F, OCollective[F]] =
Resource.pure(new OCollective[F] {
def find(name: Ident): F[Option[RCollective]] =
store.transact(RCollective.findById(name))
def updateLanguage(collective: Ident, lang: Language): F[AddResult] =
store.transact(RCollective.updateLanguage(collective, lang)).
attempt.map(AddResult.fromUpdate)
store
.transact(RCollective.updateLanguage(collective, lang))
.attempt
.map(AddResult.fromUpdate)
def listUser(collective: Ident): F[Vector[RUser]] = {
def listUser(collective: Ident): F[Vector[RUser]] =
store.transact(RUser.findAll(collective, _.login))
}
def add(s: RUser): F[AddResult] =
store.add(RUser.insert(s.copy(password = PasswordCrypt.crypt(s.password))), RUser.exists(s.login))
store.add(
RUser.insert(s.copy(password = PasswordCrypt.crypt(s.password))),
RUser.exists(s.login)
)
def update(s: RUser): F[AddResult] =
store.add(RUser.update(s), RUser.exists(s.login))
def deleteUser(login: Ident, collective: Ident): F[AddResult] =
store.transact(RUser.delete(login, collective)).
attempt.map(AddResult.fromUpdate)
store.transact(RUser.delete(login, collective)).attempt.map(AddResult.fromUpdate)
def insights(collective: Ident): F[InsightData] =
store.transact(QCollective.getInsights(collective))
def changePassword(accountId: AccountId, current: Password, newPass: Password): F[PassChangeResult] = {
def changePassword(
accountId: AccountId,
current: Password,
newPass: Password
): F[PassChangeResult] = {
val q = for {
optUser <- RUser.findByAccount(accountId)
check = optUser.map(_.password).map(p => PasswordCrypt.check(current, p))
n <- check.filter(identity).traverse(_ => RUser.updatePassword(accountId, PasswordCrypt.crypt(newPass)))
res = check match {
check = optUser.map(_.password).map(p => PasswordCrypt.check(current, p))
n <- check
.filter(identity)
.traverse(_ => RUser.updatePassword(accountId, PasswordCrypt.crypt(newPass)))
res = check match {
case Some(true) =>
if (n.getOrElse(0) > 0) PassChangeResult.success else PassChangeResult.updateFailed
case Some(false) =>

View File

@ -17,7 +17,6 @@ trait OEquipment[F[_]] {
def delete(id: Ident, collective: Ident): F[AddResult]
}
object OEquipment {
def apply[F[_]: Effect](store: Store[F]): Resource[F, OEquipment[F]] =
@ -43,12 +42,10 @@ object OEquipment {
def delete(id: Ident, collective: Ident): F[AddResult] = {
val io = for {
n0 <- RItem.removeConcEquip(collective, id)
n1 <- REquipment.delete(id, collective)
n0 <- RItem.removeConcEquip(collective, id)
n1 <- REquipment.delete(id, collective)
} yield n0 + n1
store.transact(io).
attempt.
map(AddResult.fromUpdate)
store.transact(io).attempt.map(AddResult.fromUpdate)
}
})
}

View File

@ -62,90 +62,98 @@ object OItem {
case class AttachmentData[F[_]](ra: RAttachment, meta: FileMeta, data: Stream[F, Byte])
def apply[F[_]: Effect](store: Store[F]): Resource[F, OItem[F]] =
Resource.pure(new OItem[F] {
def findItem(id: Ident, collective: Ident): F[Option[ItemData]] =
store.transact(QItem.findItem(id)).
map(opt => opt.flatMap(_.filterCollective(collective)))
store.transact(QItem.findItem(id)).map(opt => opt.flatMap(_.filterCollective(collective)))
def findItems(q: Query, maxResults: Int): F[Vector[ListItem]] = {
def findItems(q: Query, maxResults: Int): F[Vector[ListItem]] =
store.transact(QItem.findItems(q).take(maxResults.toLong)).compile.toVector
}
def findAttachment(id: Ident, collective: Ident): F[Option[AttachmentData[F]]] = {
store.transact(RAttachment.findByIdAndCollective(id, collective)).
flatMap({
def findAttachment(id: Ident, collective: Ident): F[Option[AttachmentData[F]]] =
store
.transact(RAttachment.findByIdAndCollective(id, collective))
.flatMap({
case Some(ra) =>
store.bitpeace.get(ra.fileId.id).unNoneTerminate.compile.last.
map(_.map(m => AttachmentData[F](ra, m, store.bitpeace.fetchData2(RangeDef.all)(Stream.emit(m)))))
store.bitpeace
.get(ra.fileId.id)
.unNoneTerminate
.compile
.last
.map(
_.map(m =>
AttachmentData[F](
ra,
m,
store.bitpeace.fetchData2(RangeDef.all)(Stream.emit(m))
)
)
)
case None =>
(None: Option[AttachmentData[F]]).pure[F]
})
}
def setTags(item: Ident, tagIds: List[Ident], collective: Ident): F[AddResult] = {
val db = for {
cid <- RItem.getCollective(item)
nd <- if (cid.contains(collective)) RTagItem.deleteItemTags(item) else 0.pure[ConnectionIO]
ni <- if (tagIds.nonEmpty && cid.contains(collective)) RTagItem.insertItemTags(item, tagIds) else 0.pure[ConnectionIO]
nd <- if (cid.contains(collective)) RTagItem.deleteItemTags(item)
else 0.pure[ConnectionIO]
ni <- if (tagIds.nonEmpty && cid.contains(collective))
RTagItem.insertItemTags(item, tagIds)
else 0.pure[ConnectionIO]
} yield nd + ni
store.transact(db).
attempt.
map(AddResult.fromUpdate)
store.transact(db).attempt.map(AddResult.fromUpdate)
}
def setDirection(item: Ident, direction: Direction, collective: Ident): F[AddResult] =
store.transact(RItem.updateDirection(item, collective, direction)).
attempt.
map(AddResult.fromUpdate)
store
.transact(RItem.updateDirection(item, collective, direction))
.attempt
.map(AddResult.fromUpdate)
def setCorrOrg(item: Ident, org: Option[Ident], collective: Ident): F[AddResult] =
store.transact(RItem.updateCorrOrg(item, collective, org)).
attempt.
map(AddResult.fromUpdate)
store.transact(RItem.updateCorrOrg(item, collective, org)).attempt.map(AddResult.fromUpdate)
def setCorrPerson(item: Ident, person: Option[Ident], collective: Ident): F[AddResult] =
store.transact(RItem.updateCorrPerson(item, collective, person)).
attempt.
map(AddResult.fromUpdate)
store
.transact(RItem.updateCorrPerson(item, collective, person))
.attempt
.map(AddResult.fromUpdate)
def setConcPerson(item: Ident, person: Option[Ident], collective: Ident): F[AddResult] =
store.transact(RItem.updateConcPerson(item, collective, person)).
attempt.
map(AddResult.fromUpdate)
store
.transact(RItem.updateConcPerson(item, collective, person))
.attempt
.map(AddResult.fromUpdate)
def setConcEquip(item: Ident, equip: Option[Ident], collective: Ident): F[AddResult] =
store.transact(RItem.updateConcEquip(item, collective, equip)).
attempt.
map(AddResult.fromUpdate)
store
.transact(RItem.updateConcEquip(item, collective, equip))
.attempt
.map(AddResult.fromUpdate)
def setNotes(item: Ident, notes: Option[String], collective: Ident): F[AddResult] =
store.transact(RItem.updateNotes(item, collective, notes)).
attempt.
map(AddResult.fromUpdate)
store.transact(RItem.updateNotes(item, collective, notes)).attempt.map(AddResult.fromUpdate)
def setName(item: Ident, name: String, collective: Ident): F[AddResult] =
store.transact(RItem.updateName(item, collective, name)).
attempt.
map(AddResult.fromUpdate)
store.transact(RItem.updateName(item, collective, name)).attempt.map(AddResult.fromUpdate)
def setState(item: Ident, state: ItemState, collective: Ident): F[AddResult] =
store.transact(RItem.updateStateForCollective(item, state, collective)).
attempt.
map(AddResult.fromUpdate)
store
.transact(RItem.updateStateForCollective(item, state, collective))
.attempt
.map(AddResult.fromUpdate)
def setItemDate(item: Ident, date: Option[Timestamp], collective: Ident): F[AddResult] =
store.transact(RItem.updateDate(item, collective, date)).
attempt.
map(AddResult.fromUpdate)
store.transact(RItem.updateDate(item, collective, date)).attempt.map(AddResult.fromUpdate)
def setItemDueDate(item: Ident, date: Option[Timestamp], collective: Ident): F[AddResult] =
store.transact(RItem.updateDueDate(item, collective, date)).
attempt.
map(AddResult.fromUpdate)
store
.transact(RItem.updateDueDate(item, collective, date))
.attempt
.map(AddResult.fromUpdate)
def delete(itemId: Ident, collective: Ident): F[Int] =
QItem.delete(store)(itemId, collective)

View File

@ -21,9 +21,9 @@ object OJob {
sealed trait JobCancelResult
object JobCancelResult {
case object Removed extends JobCancelResult
case object Removed extends JobCancelResult
case object CancelRequested extends JobCancelResult
case object JobNotFound extends JobCancelResult
case object JobNotFound extends JobCancelResult
}
case class JobDetail(job: RJob, logs: Vector[RJobLog])
@ -36,15 +36,19 @@ object OJob {
jobs.filter(_.job.state == JobState.Running)
}
def apply[F[_]: ConcurrentEffect](store: Store[F], clientEC: ExecutionContext): Resource[F, OJob[F]] =
def apply[F[_]: ConcurrentEffect](
store: Store[F],
clientEC: ExecutionContext
): Resource[F, OJob[F]] =
Resource.pure(new OJob[F] {
def queueState(collective: Ident, maxResults: Int): F[CollectiveQueueState] = {
store.transact(QJob.queueStateSnapshot(collective).take(maxResults.toLong)).
map(t => JobDetail(t._1, t._2)).
compile.toVector.
map(CollectiveQueueState)
}
def queueState(collective: Ident, maxResults: Int): F[CollectiveQueueState] =
store
.transact(QJob.queueStateSnapshot(collective).take(maxResults.toLong))
.map(t => JobDetail(t._1, t._2))
.compile
.toVector
.map(CollectiveQueueState)
def cancelJob(id: Ident, collective: Ident): F[JobCancelResult] = {
def mustCancel(job: Option[RJob]): Option[(RJob, Ident)] =
@ -58,26 +62,27 @@ object OJob {
val tryDelete = for {
job <- RJob.findByIdAndGroup(id, collective)
jobm = job.filter(canDelete)
jobm = job.filter(canDelete)
del <- jobm.traverse(j => RJob.delete(j.id))
} yield del match {
case Some(_) => Right(JobCancelResult.Removed: JobCancelResult)
case None => Left(mustCancel(job))
case None => Left(mustCancel(job))
}
def tryCancel(job: RJob, worker: Ident): F[JobCancelResult] =
OJoex.cancelJob(job.id, worker, store, clientEC).
map(flag => if (flag) JobCancelResult.CancelRequested else JobCancelResult.JobNotFound)
OJoex
.cancelJob(job.id, worker, store, clientEC)
.map(flag => if (flag) JobCancelResult.CancelRequested else JobCancelResult.JobNotFound)
for {
tryDel <- store.transact(tryDelete)
result <- tryDel match {
case Right(r) => r.pure[F]
case Left(Some((job, worker))) =>
tryCancel(job, worker)
case Left(None) =>
(JobCancelResult.JobNotFound: OJob.JobCancelResult).pure[F]
}
tryDel <- store.transact(tryDelete)
result <- tryDel match {
case Right(r) => r.pure[F]
case Left(Some((job, worker))) =>
tryCancel(job, worker)
case Left(None) =>
(JobCancelResult.JobNotFound: OJob.JobCancelResult).pure[F]
}
} yield result
}
})

View File

@ -13,24 +13,32 @@ import scala.concurrent.ExecutionContext
import org.log4s._
object OJoex {
private [this] val logger = getLogger
private[this] val logger = getLogger
def notifyAll[F[_]: ConcurrentEffect](store: Store[F], clientExecutionContext: ExecutionContext): F[Unit] = {
def notifyAll[F[_]: ConcurrentEffect](
store: Store[F],
clientExecutionContext: ExecutionContext
): F[Unit] =
for {
nodes <- store.transact(RNode.findAll(NodeType.Joex))
_ <- nodes.toList.traverse(notifyJoex[F](clientExecutionContext))
} yield ()
}
def cancelJob[F[_]: ConcurrentEffect](jobId: Ident, worker: Ident, store: Store[F], clientEc: ExecutionContext): F[Boolean] =
def cancelJob[F[_]: ConcurrentEffect](
jobId: Ident,
worker: Ident,
store: Store[F],
clientEc: ExecutionContext
): F[Boolean] =
for {
node <- store.transact(RNode.findById(worker))
node <- store.transact(RNode.findById(worker))
cancel <- node.traverse(joexCancel(clientEc)(_, jobId))
} yield cancel.getOrElse(false)
private def joexCancel[F[_]: ConcurrentEffect](ec: ExecutionContext)(node: RNode, job: Ident): F[Boolean] = {
val notifyUrl = node.url/"api"/"v1"/"job"/job.id/"cancel"
private def joexCancel[F[_]: ConcurrentEffect](
ec: ExecutionContext
)(node: RNode, job: Ident): F[Boolean] = {
val notifyUrl = node.url / "api" / "v1" / "job" / job.id / "cancel"
BlazeClientBuilder[F](ec).resource.use { client =>
val req = Request[F](POST, Uri.unsafeFromString(notifyUrl.asString))
client.expect[String](req).map(_ => true)
@ -38,7 +46,7 @@ object OJoex {
}
private def notifyJoex[F[_]: ConcurrentEffect](ec: ExecutionContext)(node: RNode): F[Unit] = {
val notifyUrl = node.url/"api"/"v1"/"notify"
val notifyUrl = node.url / "api" / "v1" / "notify"
val execute = BlazeClientBuilder[F](ec).resource.use { client =>
val req = Request[F](POST, Uri.unsafeFromString(notifyUrl.asString))
client.expect[String](req).map(_ => ())

View File

@ -36,13 +36,15 @@ object OOrganization {
case class PersonAndContacts(person: RPerson, contacts: Seq[RContact])
def apply[F[_] : Effect](store: Store[F]): Resource[F, OOrganization[F]] =
def apply[F[_]: Effect](store: Store[F]): Resource[F, OOrganization[F]] =
Resource.pure(new OOrganization[F] {
def findAllOrg(account: AccountId): F[Vector[OrgAndContacts]] =
store.transact(QOrganization.findOrgAndContact(account.collective, _.name)).
map({ case (org, cont) => OrgAndContacts(org, cont) }).
compile.toVector
store
.transact(QOrganization.findOrgAndContact(account.collective, _.name))
.map({ case (org, cont) => OrgAndContacts(org, cont) })
.compile
.toVector
def findAllOrgRefs(account: AccountId): F[Vector[IdRef]] =
store.transact(ROrganization.findAllRef(account.collective, _.name))
@ -54,9 +56,11 @@ object OOrganization {
QOrganization.updateOrg(s.org, s.contacts, s.org.cid)(store)
def findAllPerson(account: AccountId): F[Vector[PersonAndContacts]] =
store.transact(QOrganization.findPersonAndContact(account.collective, _.name)).
map({ case (person, cont) => PersonAndContacts(person, cont) }).
compile.toVector
store
.transact(QOrganization.findPersonAndContact(account.collective, _.name))
.map({ case (person, cont) => PersonAndContacts(person, cont) })
.compile
.toVector
def findAllPersonRefs(account: AccountId): F[Vector[IdRef]] =
store.transact(RPerson.findAllRef(account.collective, _.name))
@ -68,14 +72,13 @@ object OOrganization {
QOrganization.updatePerson(s.person, s.contacts, s.person.cid)(store)
def deleteOrg(orgId: Ident, collective: Ident): F[AddResult] =
store.transact(QOrganization.deleteOrg(orgId, collective)).
attempt.
map(AddResult.fromUpdate)
store.transact(QOrganization.deleteOrg(orgId, collective)).attempt.map(AddResult.fromUpdate)
def deletePerson(personId: Ident, collective: Ident): F[AddResult] =
store.transact(QOrganization.deletePerson(personId, collective)).
attempt.
map(AddResult.fromUpdate)
store
.transact(QOrganization.deletePerson(personId, collective))
.attempt
.map(AddResult.fromUpdate)
})
}

View File

@ -41,8 +41,6 @@ object OSource {
}
def delete(id: Ident, collective: Ident): F[AddResult] =
store.transact(RSource.delete(id, collective)).
attempt.
map(AddResult.fromUpdate)
store.transact(RSource.delete(id, collective)).attempt.map(AddResult.fromUpdate)
})
}

View File

@ -17,7 +17,6 @@ trait OTag[F[_]] {
def delete(id: Ident, collective: Ident): F[AddResult]
}
object OTag {
def apply[F[_]: Effect](store: Store[F]): Resource[F, OTag[F]] =
@ -47,10 +46,7 @@ object OTag {
n0 <- optTag.traverse(t => RTagItem.deleteTag(t.tagId))
n1 <- optTag.traverse(t => RTag.delete(t.tagId, collective))
} yield n0.getOrElse(0) + n1.getOrElse(0)
store.transact(io).
attempt.
map(AddResult.fromUpdate)
store.transact(io).attempt.map(AddResult.fromUpdate)
}
})
}

View File

@ -22,75 +22,113 @@ trait OUpload[F[_]] {
}
object OUpload {
private [this] val logger = getLogger
private[this] val logger = getLogger
case class File[F[_]](name: Option[String], advertisedMime: Option[MimeType], data: Stream[F, Byte])
case class File[F[_]](
name: Option[String],
advertisedMime: Option[MimeType],
data: Stream[F, Byte]
)
case class UploadMeta( direction: Option[Direction]
, sourceAbbrev: String
, validFileTypes: Seq[MimeType])
case class UploadMeta(
direction: Option[Direction],
sourceAbbrev: String,
validFileTypes: Seq[MimeType]
)
case class UploadData[F[_]]( multiple: Boolean
, meta: UploadMeta
, files: Vector[File[F]], priority: Priority, tracker: Option[Ident])
case class UploadData[F[_]](
multiple: Boolean,
meta: UploadMeta,
files: Vector[File[F]],
priority: Priority,
tracker: Option[Ident]
)
sealed trait UploadResult
object UploadResult {
case object Success extends UploadResult
case object NoFiles extends UploadResult
case object Success extends UploadResult
case object NoFiles extends UploadResult
case object NoSource extends UploadResult
}
def apply[F[_]: ConcurrentEffect](store: Store[F], queue: JobQueue[F], cfg: Config, httpClientEC: ExecutionContext): Resource[F, OUpload[F]] =
def apply[F[_]: ConcurrentEffect](
store: Store[F],
queue: JobQueue[F],
cfg: Config,
httpClientEC: ExecutionContext
): Resource[F, OUpload[F]] =
Resource.pure(new OUpload[F] {
def submit(data: OUpload.UploadData[F], account: AccountId): F[OUpload.UploadResult] = {
def submit(data: OUpload.UploadData[F], account: AccountId): F[OUpload.UploadResult] =
for {
files <- data.files.traverse(saveFile).map(_.flatten)
pred <- checkFileList(files)
lang <- store.transact(RCollective.findLanguage(account.collective))
meta = ProcessItemArgs.ProcessMeta(account.collective, lang.getOrElse(Language.German), data.meta.direction, data.meta.sourceAbbrev, data.meta.validFileTypes)
args = if (data.multiple) files.map(f => ProcessItemArgs(meta, List(f))) else Vector(ProcessItemArgs(meta, files.toList))
job <- pred.traverse(_ => makeJobs(args, account, data.priority, data.tracker))
_ <- logger.fdebug(s"Storing jobs: $job")
res <- job.traverse(submitJobs)
_ <- store.transact(RSource.incrementCounter(data.meta.sourceAbbrev, account.collective))
meta = ProcessItemArgs.ProcessMeta(
account.collective,
lang.getOrElse(Language.German),
data.meta.direction,
data.meta.sourceAbbrev,
data.meta.validFileTypes
)
args = if (data.multiple) files.map(f => ProcessItemArgs(meta, List(f)))
else Vector(ProcessItemArgs(meta, files.toList))
job <- pred.traverse(_ => makeJobs(args, account, data.priority, data.tracker))
_ <- logger.fdebug(s"Storing jobs: $job")
res <- job.traverse(submitJobs)
_ <- store.transact(RSource.incrementCounter(data.meta.sourceAbbrev, account.collective))
} yield res.fold(identity, identity)
}
def submit(data: OUpload.UploadData[F], sourceId: Ident): F[OUpload.UploadResult] =
for {
sOpt <- store.transact(RSource.find(sourceId)).map(_.toRight(UploadResult.NoSource))
abbrev = sOpt.map(_.abbrev).toOption.getOrElse(data.meta.sourceAbbrev)
updata = data.copy(meta = data.meta.copy(sourceAbbrev = abbrev))
accId = sOpt.map(source => AccountId(source.cid, source.sid))
result <- accId.traverse(acc => submit(updata, acc))
sOpt <- store.transact(RSource.find(sourceId)).map(_.toRight(UploadResult.NoSource))
abbrev = sOpt.map(_.abbrev).toOption.getOrElse(data.meta.sourceAbbrev)
updata = data.copy(meta = data.meta.copy(sourceAbbrev = abbrev))
accId = sOpt.map(source => AccountId(source.cid, source.sid))
result <- accId.traverse(acc => submit(updata, acc))
} yield result.fold(identity, identity)
private def submitJobs(jobs: Vector[RJob]): F[OUpload.UploadResult] = {
private def submitJobs(jobs: Vector[RJob]): F[OUpload.UploadResult] =
for {
_ <- logger.fdebug(s"Storing jobs: $jobs")
_ <- queue.insertAll(jobs)
_ <- OJoex.notifyAll(store, httpClientEC)
_ <- logger.fdebug(s"Storing jobs: $jobs")
_ <- queue.insertAll(jobs)
_ <- OJoex.notifyAll(store, httpClientEC)
} yield UploadResult.Success
}
private def saveFile(file: File[F]): F[Option[ProcessItemArgs.File]] = {
private def saveFile(file: File[F]): F[Option[ProcessItemArgs.File]] =
logger.finfo(s"Receiving file $file") *>
store.bitpeace.saveNew(file.data, cfg.files.chunkSize, MimetypeHint(file.name, None), None).
compile.lastOrError.map(fm => Ident.unsafe(fm.id)).attempt.
map(_.fold(ex => {
logger.warn(ex)(s"Could not store file for processing!")
None
}, id => Some(ProcessItemArgs.File(file.name, id))))
}
store.bitpeace
.saveNew(file.data, cfg.files.chunkSize, MimetypeHint(file.name, None), None)
.compile
.lastOrError
.map(fm => Ident.unsafe(fm.id))
.attempt
.map(_.fold(ex => {
logger.warn(ex)(s"Could not store file for processing!")
None
}, id => Some(ProcessItemArgs.File(file.name, id))))
private def checkFileList(files: Seq[ProcessItemArgs.File]): F[Either[UploadResult, Unit]] =
Effect[F].pure(if (files.isEmpty) Left(UploadResult.NoFiles) else Right(()))
private def makeJobs(args: Vector[ProcessItemArgs], account: AccountId, prio: Priority, tracker: Option[Ident]): F[Vector[RJob]] = {
private def makeJobs(
args: Vector[ProcessItemArgs],
account: AccountId,
prio: Priority,
tracker: Option[Ident]
): F[Vector[RJob]] = {
def create(id: Ident, now: Timestamp, arg: ProcessItemArgs): RJob =
RJob.newJob(id, ProcessItemArgs.taskName, account.collective, arg, arg.makeSubject, now, account.user, prio, tracker)
RJob.newJob(
id,
ProcessItemArgs.taskName,
account.collective,
arg,
arg.makeSubject,
now,
account.user,
prio,
tracker
)
for {
id <- Ident.randomId[F]

View File

@ -20,10 +20,10 @@ object Config {
def fromString(str: String): Either[String, Mode] =
str.toLowerCase match {
case "open" => Right(Open)
case "open" => Right(Open)
case "invite" => Right(Invite)
case "closed" => Right(Closed)
case _ => Left(s"Invalid signup mode: $str")
case _ => Left(s"Invalid signup mode: $str")
}
def unsafe(str: String): Mode =
fromString(str).fold(sys.error, identity)
@ -34,7 +34,7 @@ object Config {
Decoder.decodeString.emap(fromString)
}
def open: Mode = Mode.Open
def open: Mode = Mode.Open
def invite: Mode = Mode.Invite
def closed: Mode = Mode.Closed

View File

@ -9,11 +9,11 @@ sealed trait NewInviteResult { self: Product =>
}
object NewInviteResult {
case class Success(id: Ident) extends NewInviteResult
case class Success(id: Ident) extends NewInviteResult
case object InvitationDisabled extends NewInviteResult
case object PasswordMismatch extends NewInviteResult
case object PasswordMismatch extends NewInviteResult
def passwordMismatch: NewInviteResult = PasswordMismatch
def invitationClosed: NewInviteResult = InvitationDisabled
def passwordMismatch: NewInviteResult = PasswordMismatch
def invitationClosed: NewInviteResult = InvitationDisabled
def success(id: Ident): NewInviteResult = Success(id)
}

View File

@ -21,19 +21,19 @@ trait OSignup[F[_]] {
object OSignup {
private[this] val logger = getLogger
def apply[F[_]:Effect](store: Store[F]): Resource[F, OSignup[F]] =
def apply[F[_]: Effect](store: Store[F]): Resource[F, OSignup[F]] =
Resource.pure(new OSignup[F] {
def newInvite(cfg: Config)(password: Password): F[NewInviteResult] = {
def newInvite(cfg: Config)(password: Password): F[NewInviteResult] =
if (cfg.mode == Config.Mode.Invite) {
if (cfg.newInvitePassword.isEmpty || cfg.newInvitePassword != password) NewInviteResult.passwordMismatch.pure[F]
if (cfg.newInvitePassword.isEmpty || cfg.newInvitePassword != password)
NewInviteResult.passwordMismatch.pure[F]
else store.transact(RInvitation.insertNew).map(ri => NewInviteResult.success(ri.id))
} else {
Effect[F].pure(NewInviteResult.invitationClosed)
}
}
def register(cfg: Config)(data: RegisterData): F[SignupResult] = {
def register(cfg: Config)(data: RegisterData): F[SignupResult] =
cfg.mode match {
case Config.Mode.Open =>
addUser(data).map(SignupResult.fromAddResult)
@ -45,11 +45,11 @@ object OSignup {
data.invite match {
case Some(inv) =>
for {
now <- Timestamp.current[F]
min = now.minus(cfg.inviteTime)
ok <- store.transact(RInvitation.useInvite(inv, min))
res <- if (ok) addUser(data).map(SignupResult.fromAddResult)
else SignupResult.invalidInvitationKey.pure[F]
now <- Timestamp.current[F]
min = now.minus(cfg.inviteTime)
ok <- store.transact(RInvitation.useInvite(inv, min))
res <- if (ok) addUser(data).map(SignupResult.fromAddResult)
else SignupResult.invalidInvitationKey.pure[F]
_ <- if (retryInvite(res))
logger.fdebug(s"Adding account failed ($res). Allow retry with invite.") *> store
.transact(
@ -61,7 +61,6 @@ object OSignup {
SignupResult.invalidInvitationKey.pure[F]
}
}
}
private def retryInvite(res: SignupResult): Boolean =
res match {
@ -77,29 +76,37 @@ object OSignup {
false
}
private def addUser(data: RegisterData): F[AddResult] = {
def toRecords: F[(RCollective, RUser)] =
for {
id2 <- Ident.randomId[F]
now <- Timestamp.current[F]
c = RCollective(data.collName, CollectiveState.Active, Language.German, now)
u = RUser(id2, data.login, data.collName, PasswordCrypt.crypt(data.password), UserState.Active, None, 0, None, now)
c = RCollective(data.collName, CollectiveState.Active, Language.German, now)
u = RUser(
id2,
data.login,
data.collName,
PasswordCrypt.crypt(data.password),
UserState.Active,
None,
0,
None,
now
)
} yield (c, u)
def insert(coll: RCollective, user: RUser): ConnectionIO[Int] = {
def insert(coll: RCollective, user: RUser): ConnectionIO[Int] =
for {
n1 <- RCollective.insert(coll)
n2 <- RUser.insert(user)
} yield n1 + n2
}
def collectiveExists: ConnectionIO[Boolean] =
RCollective.existsById(data.collName)
val msg = s"The collective '${data.collName}' already exists."
for {
cu <- toRecords
cu <- toRecords
save <- store.add(insert(cu._1, cu._2), collectiveExists)
} yield save.fold(identity, _.withMsg(msg), identity)
}

View File

@ -2,27 +2,25 @@ package docspell.backend.signup
import docspell.store.AddResult
sealed trait SignupResult {
}
sealed trait SignupResult {}
object SignupResult {
case object CollectiveExists extends SignupResult
case object InvalidInvitationKey extends SignupResult
case object SignupClosed extends SignupResult
case object CollectiveExists extends SignupResult
case object InvalidInvitationKey extends SignupResult
case object SignupClosed extends SignupResult
case class Failure(ex: Throwable) extends SignupResult
case object Success extends SignupResult
case object Success extends SignupResult
def collectiveExists: SignupResult = CollectiveExists
def invalidInvitationKey: SignupResult = InvalidInvitationKey
def signupClosed: SignupResult = SignupClosed
def collectiveExists: SignupResult = CollectiveExists
def invalidInvitationKey: SignupResult = InvalidInvitationKey
def signupClosed: SignupResult = SignupClosed
def failure(ex: Throwable): SignupResult = Failure(ex)
def success: SignupResult = Success
def success: SignupResult = Success
def fromAddResult(ar: AddResult): SignupResult = ar match {
case AddResult.Success => Success
case AddResult.Failure(ex) => Failure(ex)
case AddResult.Success => Success
case AddResult.Failure(ex) => Failure(ex)
case AddResult.EntityExists(_) => CollectiveExists
}
}

View File

@ -1,12 +1,14 @@
package docspell.common
case class Banner( component: String
, version: String
, gitHash: Option[String]
, jdbcUrl: LenientUri
, configFile: Option[String]
, appId: Ident
, baseUrl: LenientUri) {
case class Banner(
component: String,
version: String,
gitHash: Option[String],
jdbcUrl: LenientUri,
configFile: Option[String],
appId: Ident,
baseUrl: LenientUri
) {
private val banner =
"""______ _ _
@ -17,16 +19,16 @@ case class Banner( component: String
||___/ \___/ \___|___/ .__/ \___|_|_|
| | |
|""".stripMargin +
s""" |_| v$version (#${gitHash.map(_.take(8)).getOrElse("")})"""
s""" |_| v$version (#${gitHash.map(_.take(8)).getOrElse("")})"""
def render(prefix: String): String = {
val text = banner.split('\n').toList ++ List(
s"<< $component >>"
, s"Id: ${appId.id}"
, s"Base-Url: ${baseUrl.asString}"
, s"Database: ${jdbcUrl.asString}"
, s"Config: ${configFile.getOrElse("")}"
, ""
s"<< $component >>",
s"Id: ${appId.id}",
s"Base-Url: ${baseUrl.asString}",
s"Database: ${jdbcUrl.asString}",
s"Config: ${configFile.getOrElse("")}",
""
)
text.map(line => s"$prefix $line").mkString("\n")

View File

@ -21,7 +21,7 @@ object Direction {
str.toLowerCase match {
case "incoming" => Right(Incoming)
case "outgoing" => Right(Outgoing)
case _ => Left(s"No direction: $str")
case _ => Left(s"No direction: $str")
}
def unsafe(str: String): Direction =

View File

@ -3,9 +3,7 @@ package docspell.common
import io.circe._
import io.circe.generic.semiauto._
case class IdRef(id: Ident, name: String) {
}
case class IdRef(id: Ident, name: String) {}
object IdRef {
@ -13,4 +11,4 @@ object IdRef {
deriveEncoder[IdRef]
implicit val jsonDecoder: Decoder[IdRef] =
deriveDecoder[IdRef]
}
}

View File

@ -10,18 +10,18 @@ sealed trait ItemState { self: Product =>
object ItemState {
case object Premature extends ItemState
case object Premature extends ItemState
case object Processing extends ItemState
case object Created extends ItemState
case object Confirmed extends ItemState
case object Created extends ItemState
case object Confirmed extends ItemState
def fromString(str: String): Either[String, ItemState] =
str.toLowerCase match {
case "premature" => Right(Premature)
case "premature" => Right(Premature)
case "processing" => Right(Processing)
case "created" => Right(Created)
case "confirmed" => Right(Confirmed)
case _ => Left(s"Invalid item state: $str")
case "created" => Right(Created)
case "confirmed" => Right(Confirmed)
case _ => Left(s"Invalid item state: $str")
}
def unsafe(str: String): ItemState =
@ -32,4 +32,3 @@ object ItemState {
implicit val jsonEncoder: Encoder[ItemState] =
Encoder.encodeString.contramap(_.name)
}

View File

@ -31,14 +31,12 @@ object Language {
def fromString(str: String): Either[String, Language] = {
val lang = str.toLowerCase
all.find(_.allNames.contains(lang)).
toRight(s"Unsupported or invalid language: $str")
all.find(_.allNames.contains(lang)).toRight(s"Unsupported or invalid language: $str")
}
def unsafe(str: String): Language =
fromString(str).fold(sys.error, identity)
implicit val jsonDecoder: Decoder[Language] =
Decoder.decodeString.emap(fromString)
implicit val jsonEncoder: Encoder[Language] =

View File

@ -11,8 +11,8 @@ sealed trait LogLevel { self: Product =>
object LogLevel {
case object Debug extends LogLevel { val toInt = 0 }
case object Info extends LogLevel { val toInt = 1 }
case object Warn extends LogLevel { val toInt = 2 }
case object Info extends LogLevel { val toInt = 1 }
case object Warn extends LogLevel { val toInt = 2 }
case object Error extends LogLevel { val toInt = 3 }
def fromInt(n: Int): LogLevel =
@ -26,12 +26,12 @@ object LogLevel {
def fromString(str: String): Either[String, LogLevel] =
str.toLowerCase match {
case "debug" => Right(Debug)
case "info" => Right(Info)
case "warn" => Right(Warn)
case "debug" => Right(Debug)
case "info" => Right(Info)
case "warn" => Right(Warn)
case "warning" => Right(Warn)
case "error" => Right(Error)
case _ => Left(s"Invalid log-level: $str")
case "error" => Right(Error)
case _ => Left(s"Invalid log-level: $str")
}
def unsafeString(str: String): LogLevel =
@ -41,4 +41,4 @@ object LogLevel {
Decoder.decodeString.emap(fromString)
implicit val jsonEncoder: Encoder[LogLevel] =
Encoder.encodeString.contramap(_.name)
}
}

View File

@ -12,7 +12,7 @@ case class MimeType(primary: String, sub: String) {
def matches(other: MimeType): Boolean =
primary == other.primary &&
(sub == other.sub || sub == "*" )
(sub == other.sub || sub == "*")
}
object MimeType {
@ -26,9 +26,10 @@ object MimeType {
def image(sub: String): MimeType =
MimeType("image", partFromString(sub).throwLeft)
private[this] val validChars: Set[Char] = (('A' to 'Z') ++ ('a' to 'z') ++ ('0' to '9') ++ "*-").toSet
private[this] val validChars: Set[Char] =
(('A' to 'Z') ++ ('a' to 'z') ++ ('0' to '9') ++ "*-").toSet
def parse(str: String): Either[String, MimeType] = {
def parse(str: String): Either[String, MimeType] =
str.indexOf('/') match {
case -1 => Left(s"Invalid MIME type: $str")
case n =>
@ -37,7 +38,6 @@ object MimeType {
sub <- partFromString(str.substring(n + 1))
} yield MimeType(prim.toLowerCase, sub.toLowerCase)
}
}
def unsafe(str: String): MimeType =
parse(str).throwLeft
@ -47,12 +47,12 @@ object MimeType {
else Left(s"Invalid identifier: $s. Allowed chars: ${validChars.mkString}")
val octetStream = application("octet-stream")
val pdf = application("pdf")
val png = image("png")
val jpeg = image("jpeg")
val tiff = image("tiff")
val html = text("html")
val plain = text("plain")
val pdf = application("pdf")
val png = image("png")
val jpeg = image("jpeg")
val tiff = image("tiff")
val html = text("html")
val plain = text("plain")
implicit val jsonEncoder: Encoder[MimeType] =
Encoder.encodeString.contramap(_.asString)

View File

@ -2,6 +2,4 @@ package docspell.common
import java.time.LocalDate
case class NerDateLabel(date: LocalDate, label: NerLabel) {
}
case class NerDateLabel(date: LocalDate, label: NerLabel) {}

View File

@ -3,9 +3,7 @@ package docspell.common
import io.circe.generic.semiauto._
import io.circe.{Decoder, Encoder}
case class NerLabel(label: String, tag: NerTag, startPosition: Int, endPosition: Int) {
}
case class NerLabel(label: String, tag: NerTag, startPosition: Int, endPosition: Int) {}
object NerLabel {
implicit val jsonEncoder: Encoder[NerLabel] = deriveEncoder[NerLabel]

View File

@ -10,13 +10,13 @@ sealed trait NodeType { self: Product =>
object NodeType {
case object Restserver extends NodeType
case object Joex extends NodeType
case object Joex extends NodeType
def fromString(str: String): Either[String, NodeType] =
str.toLowerCase match {
case "restserver" => Right(Restserver)
case "joex" => Right(Joex)
case _ => Left(s"Invalid node type: $str")
case "joex" => Right(Joex)
case _ => Left(s"Invalid node type: $str")
}
def unsafe(str: String): NodeType =

View File

@ -4,7 +4,7 @@ import io.circe.{Decoder, Encoder}
final class Password(val pass: String) extends AnyVal {
def isEmpty: Boolean= pass.isEmpty
def isEmpty: Boolean = pass.isEmpty
override def toString: String =
if (pass.isEmpty) "<empty>" else "***"

View File

@ -16,25 +16,23 @@ object Priority {
case object Low extends Priority
def fromString(str: String): Either[String, Priority] =
str.toLowerCase match {
case "high" => Right(High)
case "low" => Right(Low)
case _ => Left(s"Invalid priority: $str")
case "low" => Right(Low)
case _ => Left(s"Invalid priority: $str")
}
def unsafe(str: String): Priority =
fromString(str).fold(sys.error, identity)
def fromInt(n: Int): Priority =
if (n <= toInt(Low)) Low
else High
def toInt(p: Priority): Int =
p match {
case Low => 0
case Low => 0
case High => 10
}

View File

@ -6,14 +6,13 @@ import ProcessItemArgs._
case class ProcessItemArgs(meta: ProcessMeta, files: List[File]) {
def makeSubject: String = {
def makeSubject: String =
files.flatMap(_.name) match {
case Nil => s"${meta.sourceAbbrev}: No files"
case n :: Nil => n
case Nil => s"${meta.sourceAbbrev}: No files"
case n :: Nil => n
case n1 :: n2 :: Nil => s"$n1, $n2"
case _ => s"${files.size} files from ${meta.sourceAbbrev}"
case _ => s"${files.size} files from ${meta.sourceAbbrev}"
}
}
}
@ -21,11 +20,13 @@ object ProcessItemArgs {
val taskName = Ident.unsafe("process-item")
case class ProcessMeta( collective: Ident
, language: Language
, direction: Option[Direction]
, sourceAbbrev: String
, validFileTypes: Seq[MimeType])
case class ProcessMeta(
collective: Ident,
language: Language,
direction: Option[Direction],
sourceAbbrev: String,
validFileTypes: Seq[MimeType]
)
object ProcessMeta {
implicit val jsonEncoder: Encoder[ProcessMeta] = deriveEncoder[ProcessMeta]

View File

@ -30,9 +30,7 @@ object Timestamp {
def current[F[_]: Sync]: F[Timestamp] =
Sync[F].delay(Timestamp(Instant.now))
implicit val encodeTimestamp: Encoder[Timestamp] =
implicit val encodeTimestamp: Encoder[Timestamp] =
BaseJsonCodecs.encodeInstantEpoch.contramap(_.value)
implicit val decodeTimestamp: Decoder[Timestamp] =

View File

@ -12,19 +12,18 @@ object UserState {
/** The user is blocked by an admin. */
case object Disabled extends UserState
def fromString(s: String): Either[String, UserState] =
s.toLowerCase match {
case "active" => Right(Active)
case "active" => Right(Active)
case "disabled" => Right(Disabled)
case _ => Left(s"Not a state value: $s")
case _ => Left(s"Not a state value: $s")
}
def unsafe(str: String): UserState =
fromString(str).fold(sys.error, identity)
def asString(s: UserState): String = s match {
case Active => "active"
case Active => "active"
case Disabled => "disabled"
}
@ -34,4 +33,4 @@ object UserState {
implicit val userStateDecoder: Decoder[UserState] =
Decoder.decodeString.emap(UserState.fromString)
}
}

View File

@ -4,18 +4,18 @@ trait EitherSyntax {
implicit final class LeftStringEitherOps[A](e: Either[String, A]) {
def throwLeft: A = e match {
case Right(a) => a
case Right(a) => a
case Left(err) => sys.error(err)
}
}
implicit final class ThrowableLeftEitherOps[A](e: Either[Throwable, A]) {
def throwLeft: A = e match {
case Right(a) => a
case Right(a) => a
case Left(err) => throw err
}
}
}
object EitherSyntax extends EitherSyntax
object EitherSyntax extends EitherSyntax

View File

@ -11,13 +11,18 @@ trait StreamSyntax {
implicit class StringStreamOps[F[_]](s: Stream[F, String]) {
def parseJsonAs[A](implicit d: Decoder[A], F: Sync[F]): F[Either[Throwable, A]] =
s.fold("")(_ + _).
compile.last.
map(optStr => for {
str <- optStr.map(_.trim).toRight(new Exception("Empty string cannot be parsed into a value"))
json <- parse(str).leftMap(_.underlying)
value <- json.as[A]
} yield value)
s.fold("")(_ + _)
.compile
.last
.map(optStr =>
for {
str <- optStr
.map(_.trim)
.toRight(new Exception("Empty string cannot be parsed into a value"))
json <- parse(str).leftMap(_.underlying)
value <- json.as[A]
} yield value
)
}

View File

@ -5,17 +5,23 @@ import docspell.joex.scheduler.SchedulerConfig
import docspell.store.JdbcConfig
import docspell.text.ocr.{Config => OcrConfig}
case class Config(appId: Ident
, baseUrl: LenientUri
, bind: Config.Bind
, jdbc: JdbcConfig
, scheduler: SchedulerConfig
, extraction: OcrConfig
case class Config(
appId: Ident,
baseUrl: LenientUri,
bind: Config.Bind,
jdbc: JdbcConfig,
scheduler: SchedulerConfig,
extraction: OcrConfig
)
object Config {
val postgres = JdbcConfig(LenientUri.unsafe("jdbc:postgresql://localhost:5432/docspelldev"), "dev", "dev")
val h2 = JdbcConfig(LenientUri.unsafe("jdbc:h2:./target/docspelldev.db;MODE=PostgreSQL;DATABASE_TO_LOWER=TRUE"), "sa", "")
val postgres =
JdbcConfig(LenientUri.unsafe("jdbc:postgresql://localhost:5432/docspelldev"), "dev", "dev")
val h2 = JdbcConfig(
LenientUri.unsafe("jdbc:h2:./target/docspelldev.db;MODE=PostgreSQL;DATABASE_TO_LOWER=TRUE"),
"sa",
""
)
case class Bind(address: String, port: Int)
}

View File

@ -11,7 +11,6 @@ object ConfigFile {
def loadConfig: Config =
ConfigSource.default.at("docspell.joex").loadOrThrow[Config]
object Implicits {
implicit val countingSchemeReader: ConfigReader[CountingScheme] =
ConfigReader[String].emap(reason(CountingScheme.readString))

View File

@ -12,11 +12,13 @@ import fs2.concurrent.SignallingRef
import scala.concurrent.ExecutionContext
final class JoexAppImpl[F[_]: ConcurrentEffect : ContextShift: Timer]( cfg: Config
, nodeOps: ONode[F]
, store: Store[F]
, termSignal: SignallingRef[F, Boolean]
, val scheduler: Scheduler[F]) extends JoexApp[F] {
final class JoexAppImpl[F[_]: ConcurrentEffect: ContextShift: Timer](
cfg: Config,
nodeOps: ONode[F],
store: Store[F],
termSignal: SignallingRef[F, Boolean],
val scheduler: Scheduler[F]
) extends JoexApp[F] {
def init: F[Unit] = {
val run = scheduler.start.compile.drain
@ -40,17 +42,25 @@ final class JoexAppImpl[F[_]: ConcurrentEffect : ContextShift: Timer]( cfg: Conf
object JoexAppImpl {
def create[F[_]: ConcurrentEffect : ContextShift: Timer](cfg: Config
, termSignal: SignallingRef[F, Boolean]
, connectEC: ExecutionContext
, blocker: Blocker): Resource[F, JoexApp[F]] =
def create[F[_]: ConcurrentEffect: ContextShift: Timer](
cfg: Config,
termSignal: SignallingRef[F, Boolean],
connectEC: ExecutionContext,
blocker: Blocker
): Resource[F, JoexApp[F]] =
for {
store <- Store.create(cfg.jdbc, connectEC, blocker)
store <- Store.create(cfg.jdbc, connectEC, blocker)
nodeOps <- ONode(store)
sch <- SchedulerBuilder(cfg.scheduler, blocker, store).
withTask(JobTask.json(ProcessItemArgs.taskName, ItemHandler[F](cfg.extraction), ItemHandler.onCancel[F])).
resource
app = new JoexAppImpl(cfg, nodeOps, store, termSignal, sch)
appR <- Resource.make(app.init.map(_ => app))(_.shutdown)
sch <- SchedulerBuilder(cfg.scheduler, blocker, store)
.withTask(
JobTask.json(
ProcessItemArgs.taskName,
ItemHandler[F](cfg.extraction),
ItemHandler.onCancel[F]
)
)
.resource
app = new JoexAppImpl(cfg, nodeOps, store, termSignal, sch)
appR <- Resource.make(app.init.map(_ => app))(_.shutdown)
} yield appR
}

View File

@ -15,20 +15,26 @@ import scala.concurrent.ExecutionContext
object JoexServer {
private case class App[F[_]](
httpApp: HttpApp[F],
termSig: SignallingRef[F, Boolean],
exitRef: Ref[F, ExitCode]
)
private case class App[F[_]](httpApp: HttpApp[F], termSig: SignallingRef[F, Boolean], exitRef: Ref[F, ExitCode])
def stream[F[_]: ConcurrentEffect : ContextShift](cfg: Config, connectEC: ExecutionContext, blocker: Blocker)
(implicit T: Timer[F]): Stream[F, Nothing] = {
def stream[F[_]: ConcurrentEffect: ContextShift](
cfg: Config,
connectEC: ExecutionContext,
blocker: Blocker
)(implicit T: Timer[F]): Stream[F, Nothing] = {
val app = for {
signal <- Resource.liftF(SignallingRef[F, Boolean](false))
signal <- Resource.liftF(SignallingRef[F, Boolean](false))
exitCode <- Resource.liftF(Ref[F].of(ExitCode.Success))
joexApp <- JoexAppImpl.create[F](cfg, signal, connectEC, blocker)
httpApp = Router(
"/api/info" -> InfoRoutes(),
"/api/v1" -> JoexRoutes(joexApp)
"/api/v1" -> JoexRoutes(joexApp)
).orNotFound
// With Middlewares in place
@ -36,14 +42,15 @@ object JoexServer {
} yield App(finalHttpApp, signal, exitCode)
Stream.resource(app).flatMap(app =>
BlazeServerBuilder[F].
bindHttp(cfg.bind.port, cfg.bind.address).
withHttpApp(app.httpApp).
withoutBanner.
serveWhile(app.termSig, app.exitRef)
)
Stream
.resource(app)
.flatMap(app =>
BlazeServerBuilder[F]
.bindHttp(cfg.bind.port, cfg.bind.address)
.withHttpApp(app.httpApp)
.withoutBanner
.serveWhile(app.termSig, app.exitRef)
)
}.drain
}

View File

@ -14,10 +14,12 @@ object Main extends IOApp {
private[this] val logger = getLogger
val blockingEc: ExecutionContext = ExecutionContext.fromExecutor(
Executors.newCachedThreadPool(ThreadFactories.ofName("docspell-joex-blocking")))
Executors.newCachedThreadPool(ThreadFactories.ofName("docspell-joex-blocking"))
)
val blocker = Blocker.liftExecutionContext(blockingEc)
val connectEC: ExecutionContext = ExecutionContext.fromExecutorService(
Executors.newFixedThreadPool(5, ThreadFactories.ofName("docspell-joex-dbconnect")))
Executors.newFixedThreadPool(5, ThreadFactories.ofName("docspell-joex-dbconnect"))
)
def run(args: List[String]) = {
args match {
@ -40,12 +42,15 @@ object Main extends IOApp {
}
val cfg = ConfigFile.loadConfig
val banner = Banner("JOEX"
, BuildInfo.version
, BuildInfo.gitHeadCommit
, cfg.jdbc.url
, Option(System.getProperty("config.file"))
, cfg.appId, cfg.baseUrl)
val banner = Banner(
"JOEX",
BuildInfo.version,
BuildInfo.gitHeadCommit,
cfg.jdbc.url,
Option(System.getProperty("config.file")),
cfg.appId,
cfg.baseUrl
)
logger.info(s"\n${banner.render("***>")}")
JoexServer.stream[IO](cfg, connectEC, blocker).compile.drain.as(ExitCode.Success)
}

View File

@ -16,56 +16,76 @@ object CreateItem {
def apply[F[_]: Sync]: Task[F, ProcessItemArgs, ItemData] =
findExisting[F].flatMap {
case Some(ri) => Task.pure(ri)
case None => createNew[F]
case None => createNew[F]
}
def createNew[F[_]: Sync]: Task[F, ProcessItemArgs, ItemData] =
Task { ctx =>
val validFiles = ctx.args.meta.validFileTypes.map(_.asString).toSet
def fileMetas(itemId: Ident, now: Timestamp) = Stream.emits(ctx.args.files).
flatMap(f => ctx.store.bitpeace.get(f.fileMetaId.id).map(fm => (f, fm))).
collect({ case (f, Some(fm)) if validFiles.contains(fm.mimetype.baseType) => f }).
zipWithIndex.
evalMap({ case (f, index) =>
Ident.randomId[F].map(id => RAttachment(id, itemId, f.fileMetaId, index.toInt, now, f.name))
}).
compile.toVector
def fileMetas(itemId: Ident, now: Timestamp) =
Stream
.emits(ctx.args.files)
.flatMap(f => ctx.store.bitpeace.get(f.fileMetaId.id).map(fm => (f, fm)))
.collect({ case (f, Some(fm)) if validFiles.contains(fm.mimetype.baseType) => f })
.zipWithIndex
.evalMap({
case (f, index) =>
Ident
.randomId[F]
.map(id => RAttachment(id, itemId, f.fileMetaId, index.toInt, now, f.name))
})
.compile
.toVector
val item = RItem.newItem[F](ctx.args.meta.collective
, ctx.args.makeSubject
, ctx.args.meta.sourceAbbrev
, ctx.args.meta.direction.getOrElse(Direction.Incoming)
, ItemState.Premature)
val item = RItem.newItem[F](
ctx.args.meta.collective,
ctx.args.makeSubject,
ctx.args.meta.sourceAbbrev,
ctx.args.meta.direction.getOrElse(Direction.Incoming),
ItemState.Premature
)
for {
_ <- ctx.logger.info(s"Creating new item with ${ctx.args.files.size} attachment(s)")
_ <- ctx.logger.info(s"Creating new item with ${ctx.args.files.size} attachment(s)")
time <- Duration.stopTime[F]
it <- item
n <- ctx.store.transact(RItem.insert(it))
_ <- if (n != 1) storeItemError[F](ctx) else ().pure[F]
fm <- fileMetas(it.id, it.created)
k <- fm.traverse(a => ctx.store.transact(RAttachment.insert(a)))
_ <- logDifferences(ctx, fm, k.sum)
dur <- time
_ <- ctx.logger.info(s"Creating item finished in ${dur.formatExact}")
it <- item
n <- ctx.store.transact(RItem.insert(it))
_ <- if (n != 1) storeItemError[F](ctx) else ().pure[F]
fm <- fileMetas(it.id, it.created)
k <- fm.traverse(a => ctx.store.transact(RAttachment.insert(a)))
_ <- logDifferences(ctx, fm, k.sum)
dur <- time
_ <- ctx.logger.info(s"Creating item finished in ${dur.formatExact}")
} yield ItemData(it, fm, Vector.empty, Vector.empty)
}
def findExisting[F[_]: Sync]: Task[F, ProcessItemArgs, Option[ItemData]] =
Task { ctx =>
for {
cand <- ctx.store.transact(QItem.findByFileIds(ctx.args.files.map(_.fileMetaId)))
_ <- if (cand.nonEmpty) ctx.logger.warn("Found existing item with these files.") else ().pure[F]
ht <- cand.drop(1).traverse(ri => QItem.delete(ctx.store)(ri.id, ri.cid))
_ <- if (ht.sum > 0) ctx.logger.warn(s"Removed ${ht.sum} items with same attachments") else ().pure[F]
rms <- cand.headOption.traverse(ri => ctx.store.transact(RAttachment.findByItemAndCollective(ri.id, ri.cid)))
} yield cand.headOption.map(ri => ItemData(ri, rms.getOrElse(Vector.empty), Vector.empty, Vector.empty))
cand <- ctx.store.transact(QItem.findByFileIds(ctx.args.files.map(_.fileMetaId)))
_ <- if (cand.nonEmpty) ctx.logger.warn("Found existing item with these files.")
else ().pure[F]
ht <- cand.drop(1).traverse(ri => QItem.delete(ctx.store)(ri.id, ri.cid))
_ <- if (ht.sum > 0) ctx.logger.warn(s"Removed ${ht.sum} items with same attachments")
else ().pure[F]
rms <- cand.headOption.traverse(ri =>
ctx.store.transact(RAttachment.findByItemAndCollective(ri.id, ri.cid))
)
} yield cand.headOption.map(ri =>
ItemData(ri, rms.getOrElse(Vector.empty), Vector.empty, Vector.empty)
)
}
private def logDifferences[F[_]: Sync](ctx: Context[F, ProcessItemArgs], saved: Vector[RAttachment], saveCount: Int): F[Unit] =
private def logDifferences[F[_]: Sync](
ctx: Context[F, ProcessItemArgs],
saved: Vector[RAttachment],
saveCount: Int
): F[Unit] =
if (ctx.args.files.size != saved.size) {
ctx.logger.warn(s"Not all given files (${ctx.args.files.size}) have been stored. Files retained: ${saved.size}; saveCount=$saveCount")
ctx.logger.warn(
s"Not all given files (${ctx.args.files.size}) have been stored. Files retained: ${saved.size}; saveCount=$saveCount"
)
} else {
().pure[F]
}

View File

@ -19,45 +19,65 @@ object FindProposal {
def apply[F[_]: Sync](data: ItemData): Task[F, ProcessItemArgs, ItemData] =
Task { ctx =>
val rmas = data.metas.map(rm =>
rm.copy(nerlabels = removeDuplicates(rm.nerlabels)))
val rmas = data.metas.map(rm => rm.copy(nerlabels = removeDuplicates(rm.nerlabels)))
ctx.logger.info("Starting find-proposal") *>
rmas.traverse(rm => processAttachment(rm, data.findDates(rm), ctx).map(ml => rm.copy(proposals = ml))).
flatMap(rmv => rmv.traverse(rm =>
ctx.logger.debug(s"Storing attachment proposals: ${rm.proposals}") *>
ctx.store.transact(RAttachmentMeta.updateProposals(rm.id, rm.proposals))).
map(_ => data.copy(metas = rmv)))
rmas
.traverse(rm =>
processAttachment(rm, data.findDates(rm), ctx).map(ml => rm.copy(proposals = ml))
)
.flatMap(rmv =>
rmv
.traverse(rm =>
ctx.logger.debug(s"Storing attachment proposals: ${rm.proposals}") *>
ctx.store.transact(RAttachmentMeta.updateProposals(rm.id, rm.proposals))
)
.map(_ => data.copy(metas = rmv))
)
}
def processAttachment[F[_]: Sync]( rm: RAttachmentMeta
, rd: Vector[NerDateLabel]
, ctx: Context[F, ProcessItemArgs]): F[MetaProposalList] = {
def processAttachment[F[_]: Sync](
rm: RAttachmentMeta,
rd: Vector[NerDateLabel],
ctx: Context[F, ProcessItemArgs]
): F[MetaProposalList] = {
val finder = Finder.searchExact(ctx).next(Finder.searchFuzzy(ctx))
List(finder.find(rm.nerlabels), makeDateProposal(rd)).
traverse(identity).map(MetaProposalList.flatten)
List(finder.find(rm.nerlabels), makeDateProposal(rd))
.traverse(identity)
.map(MetaProposalList.flatten)
}
def makeDateProposal[F[_]: Sync](dates: Vector[NerDateLabel]): F[MetaProposalList] = {
def makeDateProposal[F[_]: Sync](dates: Vector[NerDateLabel]): F[MetaProposalList] =
Timestamp.current[F].map { now =>
val latestFirst = dates.sortWith(_.date isAfter _.date)
val nowDate = now.value.atZone(ZoneId.of("GMT")).toLocalDate
val latestFirst = dates.sortWith((l1, l2) => l1.date.isAfter(l2.date))
val nowDate = now.value.atZone(ZoneId.of("GMT")).toLocalDate
val (after, before) = latestFirst.span(ndl => ndl.date.isAfter(nowDate))
val dueDates = MetaProposalList.fromSeq1(MetaProposalType.DueDate,
after.map(ndl => Candidate(IdRef(Ident.unsafe(ndl.date.toString), ndl.date.toString), Set(ndl.label))))
val itemDates = MetaProposalList.fromSeq1(MetaProposalType.DocDate,
before.map(ndl => Candidate(IdRef(Ident.unsafe(ndl.date.toString), ndl.date.toString), Set(ndl.label))))
val dueDates = MetaProposalList.fromSeq1(
MetaProposalType.DueDate,
after.map(ndl =>
Candidate(IdRef(Ident.unsafe(ndl.date.toString), ndl.date.toString), Set(ndl.label))
)
)
val itemDates = MetaProposalList.fromSeq1(
MetaProposalType.DocDate,
before.map(ndl =>
Candidate(IdRef(Ident.unsafe(ndl.date.toString), ndl.date.toString), Set(ndl.label))
)
)
MetaProposalList.flatten(Seq(dueDates, itemDates))
}
}
def removeDuplicates(labels: List[NerLabel]): List[NerLabel] =
labels.foldLeft((Set.empty[String], List.empty[NerLabel])) { case ((seen, result), el) =>
if (seen.contains(el.tag.name+el.label.toLowerCase)) (seen, result)
else (seen + (el.tag.name + el.label.toLowerCase), el :: result)
}._2.sortBy(_.startPosition)
labels
.foldLeft((Set.empty[String], List.empty[NerLabel])) {
case ((seen, result), el) =>
if (seen.contains(el.tag.name + el.label.toLowerCase)) (seen, result)
else (seen + (el.tag.name + el.label.toLowerCase), el :: result)
}
._2
.sortBy(_.startPosition)
trait Finder[F[_]] { self =>
def find(labels: Seq[NerLabel]): F[MetaProposalList]
@@ -80,12 +100,14 @@ object FindProposal {
else f.map(ml1 => ml0.fillEmptyFrom(ml1))
})
def nextWhenEmpty(f: Finder[F], mt0: MetaProposalType, mts: MetaProposalType*)
(implicit F: FlatMap[F], F2: Applicative[F]): Finder[F] =
flatMap(res0 => {
def nextWhenEmpty(f: Finder[F], mt0: MetaProposalType, mts: MetaProposalType*)(
implicit F: FlatMap[F],
F2: Applicative[F]
): Finder[F] =
flatMap { res0 =>
if (res0.hasResults(mt0, mts: _*)) Finder.unit[F](res0)
else f.map(res1 => res0.fillEmptyFrom(res1))
})
}
}
object Finder {
@@ -102,7 +124,11 @@ object FindProposal {
labels => labels.toList.traverse(nl => search(nl, false, ctx)).map(MetaProposalList.flatten)
}
private def search[F[_]: Sync](nt: NerLabel, exact: Boolean, ctx: Context[F, ProcessItemArgs]): F[MetaProposalList] = {
private def search[F[_]: Sync](
nt: NerLabel,
exact: Boolean,
ctx: Context[F, ProcessItemArgs]
): F[MetaProposalList] = {
val value =
if (exact) normalizeSearchValue(nt.label)
else s"%${normalizeSearchValue(nt.label)}%"
@@ -110,70 +136,84 @@ object FindProposal {
if (exact) 2 else 5
if (value.length < minLength) {
ctx.logger.debug(s"Skipping too small value '$value' (original '${nt.label}').").map(_ => MetaProposalList.empty)
} else nt.tag match {
case NerTag.Organization =>
ctx.logger.debug(s"Looking for organizations: $value") *>
ctx.store.transact(ROrganization.findLike(ctx.args.meta.collective, value)).
map(MetaProposalList.from(MetaProposalType.CorrOrg, nt))
ctx.logger
.debug(s"Skipping too small value '$value' (original '${nt.label}').")
.map(_ => MetaProposalList.empty)
} else
nt.tag match {
case NerTag.Organization =>
ctx.logger.debug(s"Looking for organizations: $value") *>
ctx.store
.transact(ROrganization.findLike(ctx.args.meta.collective, value))
.map(MetaProposalList.from(MetaProposalType.CorrOrg, nt))
case NerTag.Person =>
val s1 = ctx.store.transact(RPerson.findLike(ctx.args.meta.collective, value, true)).
map(MetaProposalList.from(MetaProposalType.ConcPerson, nt))
val s2 = ctx.store.transact(RPerson.findLike(ctx.args.meta.collective, value, false)).
map(MetaProposalList.from(MetaProposalType.CorrPerson, nt))
ctx.logger.debug(s"Looking for persons: $value") *> (for {
ml0 <- s1
ml1 <- s2
} yield ml0 |+| ml1)
case NerTag.Person =>
val s1 = ctx.store
.transact(RPerson.findLike(ctx.args.meta.collective, value, true))
.map(MetaProposalList.from(MetaProposalType.ConcPerson, nt))
val s2 = ctx.store
.transact(RPerson.findLike(ctx.args.meta.collective, value, false))
.map(MetaProposalList.from(MetaProposalType.CorrPerson, nt))
ctx.logger.debug(s"Looking for persons: $value") *> (for {
ml0 <- s1
ml1 <- s2
} yield ml0 |+| ml1)
case NerTag.Location =>
ctx.logger.debug(s"NerTag 'Location' is currently not used. Ignoring value '$value'.").
map(_ => MetaProposalList.empty)
case NerTag.Location =>
ctx.logger
.debug(s"NerTag 'Location' is currently not used. Ignoring value '$value'.")
.map(_ => MetaProposalList.empty)
case NerTag.Misc =>
ctx.logger.debug(s"Looking for equipments: $value") *>
ctx.store.transact(REquipment.findLike(ctx.args.meta.collective, value)).
map(MetaProposalList.from(MetaProposalType.ConcEquip, nt))
case NerTag.Misc =>
ctx.logger.debug(s"Looking for equipments: $value") *>
ctx.store
.transact(REquipment.findLike(ctx.args.meta.collective, value))
.map(MetaProposalList.from(MetaProposalType.ConcEquip, nt))
case NerTag.Email =>
searchContact(nt, ContactKind.Email, value, ctx)
case NerTag.Email =>
searchContact(nt, ContactKind.Email, value, ctx)
case NerTag.Website =>
if (!exact) {
val searchString = Domain.domainFromUri(nt.label.toLowerCase).
toOption.
map(_.toPrimaryDomain.asString).
map(s => s"%$s%").
getOrElse(value)
searchContact(nt, ContactKind.Website, searchString, ctx)
} else {
searchContact(nt, ContactKind.Website, value, ctx)
}
case NerTag.Website =>
if (!exact) {
val searchString = Domain
.domainFromUri(nt.label.toLowerCase)
.toOption
.map(_.toPrimaryDomain.asString)
.map(s => s"%$s%")
.getOrElse(value)
searchContact(nt, ContactKind.Website, searchString, ctx)
} else {
searchContact(nt, ContactKind.Website, value, ctx)
}
case NerTag.Date =>
// There is no database search required for this tag
MetaProposalList.empty.pure[F]
}
case NerTag.Date =>
// There is no database search required for this tag
MetaProposalList.empty.pure[F]
}
}
private def searchContact[F[_]: Sync]( nt: NerLabel
, kind: ContactKind
, value: String
, ctx: Context[F, ProcessItemArgs]): F[MetaProposalList] = {
val orgs = ctx.store.transact(ROrganization.findLike(ctx.args.meta.collective, kind, value)).
map(MetaProposalList.from(MetaProposalType.CorrOrg, nt))
val corrP = ctx.store.transact(RPerson.findLike(ctx.args.meta.collective, kind, value, false)).
map(MetaProposalList.from(MetaProposalType.CorrPerson, nt))
val concP = ctx.store.transact(RPerson.findLike(ctx.args.meta.collective, kind, value, true)).
map(MetaProposalList.from(MetaProposalType.CorrPerson, nt))
private def searchContact[F[_]: Sync](
nt: NerLabel,
kind: ContactKind,
value: String,
ctx: Context[F, ProcessItemArgs]
): F[MetaProposalList] = {
val orgs = ctx.store
.transact(ROrganization.findLike(ctx.args.meta.collective, kind, value))
.map(MetaProposalList.from(MetaProposalType.CorrOrg, nt))
val corrP = ctx.store
.transact(RPerson.findLike(ctx.args.meta.collective, kind, value, false))
.map(MetaProposalList.from(MetaProposalType.CorrPerson, nt))
val concP = ctx.store
.transact(RPerson.findLike(ctx.args.meta.collective, kind, value, true))
.map(MetaProposalList.from(MetaProposalType.CorrPerson, nt))
ctx.logger.debug(s"Looking with $kind: $value") *>
List(orgs, corrP, concP).traverse(identity).map(MetaProposalList.flatten)
}
// The backslash *must* be stripped from search strings.
private [this] val invalidSearch =
private[this] val invalidSearch =
"…_[]^<>=&ſ/{}*?@#$|~`+%\"';\\".toSet
private def normalizeSearchValue(str: String): String =

View File

@@ -4,10 +4,12 @@ import docspell.common.{Ident, NerDateLabel, NerLabel}
import docspell.joex.process.ItemData.AttachmentDates
import docspell.store.records.{RAttachment, RAttachmentMeta, RItem}
case class ItemData( item: RItem
, attachments: Vector[RAttachment]
, metas: Vector[RAttachmentMeta]
, dateLabels: Vector[AttachmentDates]) {
case class ItemData(
item: RItem,
attachments: Vector[RAttachment],
metas: Vector[RAttachmentMeta],
dateLabels: Vector[AttachmentDates]
) {
def findMeta(attachId: Ident): Option[RAttachmentMeta] =
metas.find(_.id == attachId)
@@ -16,7 +18,6 @@ case class ItemData( item: RItem
dateLabels.find(m => m.rm.id == rm.id).map(_.dates).getOrElse(Vector.empty)
}
object ItemData {
case class AttachmentDates(rm: RAttachmentMeta, dates: Vector[NerDateLabel]) {
@@ -24,4 +25,4 @@ object ItemData {
dates.map(dl => dl.label.copy(label = dl.date.toString))
}
}
}

View File

@@ -10,14 +10,13 @@ import docspell.text.ocr.{Config => OcrConfig}
object ItemHandler {
def onCancel[F[_]: Sync: ContextShift]: Task[F, ProcessItemArgs, Unit] =
logWarn("Now cancelling. Deleting potentially created data.").
flatMap(_ => deleteByFileIds)
logWarn("Now cancelling. Deleting potentially created data.").flatMap(_ => deleteByFileIds)
def apply[F[_]: Sync: ContextShift](cfg: OcrConfig): Task[F, ProcessItemArgs, Unit] =
CreateItem[F].
flatMap(itemStateTask(ItemState.Processing)).
flatMap(safeProcess[F](cfg)).
map(_ => ())
CreateItem[F]
.flatMap(itemStateTask(ItemState.Processing))
.flatMap(safeProcess[F](cfg))
.map(_ => ())
def itemStateTask[F[_]: Sync, A](state: ItemState)(data: ItemData): Task[F, A, ItemData] =
Task { ctx =>
@@ -26,26 +25,25 @@ object ItemHandler {
def isLastRetry[F[_]: Sync, A](ctx: Context[F, A]): F[Boolean] =
for {
current <- ctx.store.transact(RJob.getRetries(ctx.jobId))
last = ctx.config.retries == current.getOrElse(0)
current <- ctx.store.transact(RJob.getRetries(ctx.jobId))
last = ctx.config.retries == current.getOrElse(0)
} yield last
def safeProcess[F[_]: Sync: ContextShift](cfg: OcrConfig)(data: ItemData): Task[F, ProcessItemArgs, ItemData] =
def safeProcess[F[_]: Sync: ContextShift](
cfg: OcrConfig
)(data: ItemData): Task[F, ProcessItemArgs, ItemData] =
Task(isLastRetry[F, ProcessItemArgs] _).flatMap {
case true =>
ProcessItem[F](cfg)(data).
attempt.flatMap({
ProcessItem[F](cfg)(data).attempt.flatMap({
case Right(d) =>
Task.pure(d)
case Left(ex) =>
logWarn[F]("Processing failed on last retry. Creating item but without proposals.").
flatMap(_ => itemStateTask(ItemState.Created)(data)).
andThen(_ => Sync[F].raiseError(ex))
logWarn[F]("Processing failed on last retry. Creating item but without proposals.")
.flatMap(_ => itemStateTask(ItemState.Created)(data))
.andThen(_ => Sync[F].raiseError(ex))
})
case false =>
ProcessItem[F](cfg)(data).
flatMap(itemStateTask(ItemState.Created))
ProcessItem[F](cfg)(data).flatMap(itemStateTask(ItemState.Created))
}
def deleteByFileIds[F[_]: Sync: ContextShift]: Task[F, ProcessItemArgs, Unit] =

View File

@@ -13,28 +13,40 @@ object LinkProposal {
val proposals = MetaProposalList.flatten(data.metas.map(_.proposals))
ctx.logger.info(s"Starting linking proposals") *>
MetaProposalType.all.
traverse(applyValue(data, proposals, ctx)).
map(result => ctx.logger.info(s"Results from proposal processing: $result")).
map(_ => data)
MetaProposalType.all
.traverse(applyValue(data, proposals, ctx))
.map(result => ctx.logger.info(s"Results from proposal processing: $result"))
.map(_ => data)
}
def applyValue[F[_]: Sync](data: ItemData, proposalList: MetaProposalList, ctx: Context[F, ProcessItemArgs])(mpt: MetaProposalType): F[Result] = {
def applyValue[F[_]: Sync](
data: ItemData,
proposalList: MetaProposalList,
ctx: Context[F, ProcessItemArgs]
)(mpt: MetaProposalType): F[Result] =
proposalList.find(mpt) match {
case None =>
Result.noneFound(mpt).pure[F]
case Some(a) if a.isSingleValue =>
ctx.logger.info(s"Found one candidate for ${a.proposalType}") *>
setItemMeta(data.item.id, ctx, a.proposalType, a.values.head.ref.id).
map(_ => Result.single(mpt))
setItemMeta(data.item.id, ctx, a.proposalType, a.values.head.ref.id).map(_ =>
Result.single(mpt)
)
case Some(a) =>
ctx.logger.info(s"Found many (${a.size}, ${a.values.map(_.ref.id.id)}) candidates for ${a.proposalType}. Setting first.") *>
setItemMeta(data.item.id, ctx, a.proposalType, a.values.head.ref.id).
map(_ => Result.multiple(mpt))
ctx.logger.info(
s"Found many (${a.size}, ${a.values.map(_.ref.id.id)}) candidates for ${a.proposalType}. Setting first."
) *>
setItemMeta(data.item.id, ctx, a.proposalType, a.values.head.ref.id).map(_ =>
Result.multiple(mpt)
)
}
}
def setItemMeta[F[_]: Sync](itemId: Ident, ctx: Context[F, ProcessItemArgs], mpt: MetaProposalType, value: Ident): F[Int] =
def setItemMeta[F[_]: Sync](
itemId: Ident,
ctx: Context[F, ProcessItemArgs],
mpt: MetaProposalType,
value: Ident
): F[Int] =
mpt match {
case MetaProposalType.CorrOrg =>
ctx.logger.debug(s"Updating item organization with: ${value.id}") *>
@@ -54,18 +66,17 @@ object LinkProposal {
ctx.logger.debug(s"Not linking document date suggestion ${value.id}").map(_ => 0)
}
sealed trait Result {
def proposalType: MetaProposalType
}
object Result {
case class NoneFound(proposalType: MetaProposalType) extends Result
case class SingleResult(proposalType: MetaProposalType) extends Result
case class NoneFound(proposalType: MetaProposalType) extends Result
case class SingleResult(proposalType: MetaProposalType) extends Result
case class MultipleResult(proposalType: MetaProposalType) extends Result
def noneFound(proposalType: MetaProposalType): Result = NoneFound(proposalType)
def single(proposalType: MetaProposalType): Result = SingleResult(proposalType)
def multiple(proposalType: MetaProposalType): Result = MultipleResult(proposalType)
def single(proposalType: MetaProposalType): Result = SingleResult(proposalType)
def multiple(proposalType: MetaProposalType): Result = MultipleResult(proposalType)
}
}

View File

@@ -7,13 +7,15 @@ import docspell.text.ocr.{Config => OcrConfig}
object ProcessItem {
def apply[F[_]: Sync: ContextShift](cfg: OcrConfig)(item: ItemData): Task[F, ProcessItemArgs, ItemData] =
TextExtraction(cfg, item).
flatMap(Task.setProgress(25)).
flatMap(TextAnalysis[F]).
flatMap(Task.setProgress(50)).
flatMap(FindProposal[F]).
flatMap(Task.setProgress(75)).
flatMap(LinkProposal[F]).
flatMap(Task.setProgress(99))
def apply[F[_]: Sync: ContextShift](
cfg: OcrConfig
)(item: ItemData): Task[F, ProcessItemArgs, ItemData] =
TextExtraction(cfg, item)
.flatMap(Task.setProgress(25))
.flatMap(TextAnalysis[F])
.flatMap(Task.setProgress(50))
.flatMap(FindProposal[F])
.flatMap(Task.setProgress(75))
.flatMap(LinkProposal[F])
.flatMap(Task.setProgress(99))
}

View File

@@ -8,7 +8,7 @@ import docspell.joex.scheduler.Task
import org.log4s._
object TestTasks {
private [this] val logger = getLogger
private[this] val logger = getLogger
def success[F[_]]: Task[F, ProcessItemArgs, Unit] =
Task { ctx =>
@@ -17,23 +17,23 @@ object TestTasks {
def failing[F[_]: Sync]: Task[F, ProcessItemArgs, Unit] =
Task { ctx =>
ctx.logger.info(s"Failing the task run :(").map(_ =>
sys.error("Oh, cannot extract gold from this document")
)
ctx.logger
.info(s"Failing the task run :(")
.map(_ => sys.error("Oh, cannot extract gold from this document"))
}
def longRunning[F[_]: Sync]: Task[F, ProcessItemArgs, Unit] =
Task { ctx =>
logger.fwarn(s"${Thread.currentThread()} From executing long running task") >>
ctx.logger.info(s"${Thread.currentThread()} Running task now: ${ctx.args}") >>
sleep(2400) >>
ctx.logger.debug("doing things") >>
sleep(2400) >>
ctx.logger.debug("doing more things") >>
sleep(2400) >>
ctx.logger.info("doing more things")
ctx.logger.info(s"${Thread.currentThread()} Running task now: ${ctx.args}") >>
sleep(2400) >>
ctx.logger.debug("doing things") >>
sleep(2400) >>
ctx.logger.debug("doing more things") >>
sleep(2400) >>
ctx.logger.info("doing more things")
}
private def sleep[F[_]:Sync](ms: Long): F[Unit] =
private def sleep[F[_]: Sync](ms: Long): F[Unit] =
Sync[F].delay(Thread.sleep(ms))
}

View File

@@ -15,35 +15,42 @@ object TextAnalysis {
def apply[F[_]: Sync](item: ItemData): Task[F, ProcessItemArgs, ItemData] =
Task { ctx =>
for {
_ <- ctx.logger.info("Starting text analysis")
s <- Duration.stopTime[F]
t <- item.metas.toList.traverse(annotateAttachment[F](ctx.args.meta.language))
_ <- ctx.logger.debug(s"Storing tags: ${t.map(_._1.copy(content = None))}")
_ <- t.traverse(m => ctx.store.transact(RAttachmentMeta.updateLabels(m._1.id, m._1.nerlabels)))
e <- s
_ <- ctx.logger.info(s"Text-Analysis finished in ${e.formatExact}")
v = t.toVector
_ <- ctx.logger.info("Starting text analysis")
s <- Duration.stopTime[F]
t <- item.metas.toList.traverse(annotateAttachment[F](ctx.args.meta.language))
_ <- ctx.logger.debug(s"Storing tags: ${t.map(_._1.copy(content = None))}")
_ <- t.traverse(m =>
ctx.store.transact(RAttachmentMeta.updateLabels(m._1.id, m._1.nerlabels))
)
e <- s
_ <- ctx.logger.info(s"Text-Analysis finished in ${e.formatExact}")
v = t.toVector
} yield item.copy(metas = v.map(_._1), dateLabels = v.map(_._2))
}
def annotateAttachment[F[_]: Sync](lang: Language)(rm: RAttachmentMeta): F[(RAttachmentMeta, AttachmentDates)] =
def annotateAttachment[F[_]: Sync](
lang: Language
)(rm: RAttachmentMeta): F[(RAttachmentMeta, AttachmentDates)] =
for {
list0 <- stanfordNer[F](lang, rm)
list1 <- contactNer[F](rm)
dates <- dateNer[F](rm, lang)
} yield (rm.copy(nerlabels = (list0 ++ list1 ++ dates.toNerLabel).toList), dates)
def stanfordNer[F[_]: Sync](lang: Language, rm: RAttachmentMeta): F[Vector[NerLabel]] = Sync[F].delay {
rm.content.map(StanfordNerClassifier.nerAnnotate(lang)).getOrElse(Vector.empty)
}
def stanfordNer[F[_]: Sync](lang: Language, rm: RAttachmentMeta): F[Vector[NerLabel]] =
Sync[F].delay {
rm.content.map(StanfordNerClassifier.nerAnnotate(lang)).getOrElse(Vector.empty)
}
def contactNer[F[_]: Sync](rm: RAttachmentMeta): F[Vector[NerLabel]] = Sync[F].delay {
rm.content.map(Contact.annotate).getOrElse(Vector.empty)
}
def dateNer[F[_]: Sync](rm: RAttachmentMeta, lang: Language): F[AttachmentDates] = Sync[F].delay {
AttachmentDates(rm, rm.content.map(txt => DateFind.findDates(txt, lang).toVector).getOrElse(Vector.empty))
AttachmentDates(
rm,
rm.content.map(txt => DateFind.findDates(txt, lang).toVector).getOrElse(Vector.empty)
)
}
}

View File

@@ -11,10 +11,13 @@ import docspell.text.ocr.{TextExtract, Config => OcrConfig}
object TextExtraction {
def apply[F[_]: Sync : ContextShift](cfg: OcrConfig, item: ItemData): Task[F, ProcessItemArgs, ItemData] =
def apply[F[_]: Sync: ContextShift](
cfg: OcrConfig,
item: ItemData
): Task[F, ProcessItemArgs, ItemData] =
Task { ctx =>
for {
_ <- ctx.logger.info("Starting text extraction")
_ <- ctx.logger.info("Starting text extraction")
start <- Duration.stopTime[F]
txt <- item.attachments.traverse(extractTextToMeta(ctx, cfg, ctx.args.meta.language))
_ <- ctx.logger.debug("Storing extracted texts")
@@ -24,22 +27,33 @@ object TextExtraction {
} yield item.copy(metas = txt)
}
def extractTextToMeta[F[_]: Sync : ContextShift](ctx: Context[F, _], cfg: OcrConfig, lang: Language)(ra: RAttachment): F[RAttachmentMeta] =
def extractTextToMeta[F[_]: Sync: ContextShift](
ctx: Context[F, _],
cfg: OcrConfig,
lang: Language
)(ra: RAttachment): F[RAttachmentMeta] =
for {
_ <- ctx.logger.debug(s"Extracting text for attachment ${ra.name}")
dst <- Duration.stopTime[F]
txt <- extractText(cfg, lang, ctx.store, ctx.blocker)(ra)
meta = RAttachmentMeta.empty(ra.id).copy(content = txt.map(_.trim).filter(_.nonEmpty))
est <- dst
_ <- ctx.logger.debug(s"Extracting text for attachment ${ra.name} finished in ${est.formatExact}")
_ <- ctx.logger.debug(s"Extracting text for attachment ${ra.name}")
dst <- Duration.stopTime[F]
txt <- extractText(cfg, lang, ctx.store, ctx.blocker)(ra)
meta = RAttachmentMeta.empty(ra.id).copy(content = txt.map(_.trim).filter(_.nonEmpty))
est <- dst
_ <- ctx.logger.debug(
s"Extracting text for attachment ${ra.name} finished in ${est.formatExact}"
)
} yield meta
def extractText[F[_]: Sync : ContextShift](ocrConfig: OcrConfig, lang: Language, store: Store[F], blocker: Blocker)(ra: RAttachment): F[Option[String]] = {
val data = store.bitpeace.get(ra.fileId.id).
unNoneTerminate.
through(store.bitpeace.fetchData2(RangeDef.all))
def extractText[F[_]: Sync: ContextShift](
ocrConfig: OcrConfig,
lang: Language,
store: Store[F],
blocker: Blocker
)(ra: RAttachment): F[Option[String]] = {
val data = store.bitpeace
.get(ra.fileId.id)
.unNoneTerminate
.through(store.bitpeace.fetchData2(RangeDef.all))
TextExtract.extract(data, blocker, lang.iso3, ocrConfig).
compile.last
TextExtract.extract(data, blocker, lang.iso3, ocrConfig).compile.last
}
}

View File

@@ -10,15 +10,19 @@ import org.http4s.dsl.Http4sDsl
object InfoRoutes {
def apply[F[_]: Sync](): HttpRoutes[F] = {
val dsl = new Http4sDsl[F]{}
val dsl = new Http4sDsl[F] {}
import dsl._
HttpRoutes.of[F] {
case GET -> (Root / "version") =>
Ok(VersionInfo(BuildInfo.version
, BuildInfo.builtAtMillis
, BuildInfo.builtAtString
, BuildInfo.gitHeadCommit.getOrElse("")
, BuildInfo.gitDescribedVersion.getOrElse("")))
Ok(
VersionInfo(
BuildInfo.version,
BuildInfo.builtAtMillis,
BuildInfo.builtAtString,
BuildInfo.gitHeadCommit.getOrElse(""),
BuildInfo.gitDescribedVersion.getOrElse("")
)
)
}
}
}

View File

@@ -13,7 +13,7 @@ import org.http4s.dsl.Http4sDsl
object JoexRoutes {
def apply[F[_]: ConcurrentEffect: Timer](app: JoexApp[F]): HttpRoutes[F] = {
val dsl = new Http4sDsl[F]{}
val dsl = new Http4sDsl[F] {}
import dsl._
HttpRoutes.of[F] {
case POST -> Root / "notify" =>
@@ -24,14 +24,16 @@ object JoexRoutes {
case GET -> Root / "running" =>
for {
jobs <- app.scheduler.getRunning
jj = jobs.map(mkJob)
resp <- Ok(JobList(jj.toList))
jobs <- app.scheduler.getRunning
jj = jobs.map(mkJob)
resp <- Ok(JobList(jj.toList))
} yield resp
case POST -> Root / "shutdownAndExit" =>
for {
_ <- ConcurrentEffect[F].start(Timer[F].sleep(Duration.seconds(1).toScala) *> app.initShutdown)
_ <- ConcurrentEffect[F].start(
Timer[F].sleep(Duration.seconds(1).toScala) *> app.initShutdown
)
resp <- Ok(BasicResult(true, "Shutdown initiated."))
} yield resp
@@ -39,20 +41,28 @@ object JoexRoutes {
for {
optJob <- app.scheduler.getRunning.map(_.find(_.id == id))
optLog <- optJob.traverse(j => app.findLogs(j.id))
jAndL = for { job <- optJob; log <- optLog } yield mkJobLog(job, log)
jAndL = for { job <- optJob; log <- optLog } yield mkJobLog(job, log)
resp <- jAndL.map(Ok(_)).getOrElse(NotFound(BasicResult(false, "Not found")))
} yield resp
case POST -> Root / "job" / Ident(id) / "cancel" =>
for {
flag <- app.scheduler.requestCancel(id)
flag <- app.scheduler.requestCancel(id)
resp <- Ok(BasicResult(flag, if (flag) "Cancel request submitted" else "Job not found"))
} yield resp
}
}
def mkJob(j: RJob): Job =
Job(j.id, j.subject, j.submitted, j.priority, j.retries, j.progress, j.started.getOrElse(Timestamp.Epoch))
Job(
j.id,
j.subject,
j.submitted,
j.priority,
j.retries,
j.progress,
j.started.getOrElse(Timestamp.Epoch)
)
def mkJobLog(j: RJob, jl: Vector[RJobLog]): JobAndLog =
JobAndLog(mkJob(j), jl.map(r => JobLogEvent(r.created, r.level, r.message)).toList)

View File

@@ -30,40 +30,45 @@ trait Context[F[_], A] { self =>
}
object Context {
private [this] val log = getLogger
private[this] val log = getLogger
def create[F[_]: Functor, A]( job: RJob
, arg: A
, config: SchedulerConfig
, log: Logger[F]
, store: Store[F]
, blocker: Blocker): Context[F, A] =
def create[F[_]: Functor, A](
job: RJob,
arg: A,
config: SchedulerConfig,
log: Logger[F],
store: Store[F],
blocker: Blocker
): Context[F, A] =
new ContextImpl(arg, log, store, blocker, config, job.id)
def apply[F[_]: Concurrent, A]( job: RJob
, arg: A
, config: SchedulerConfig
, logSink: LogSink[F]
, blocker: Blocker
, store: Store[F]): F[Context[F, A]] =
def apply[F[_]: Concurrent, A](
job: RJob,
arg: A,
config: SchedulerConfig,
logSink: LogSink[F],
blocker: Blocker,
store: Store[F]
): F[Context[F, A]] =
for {
_ <- log.ftrace("Creating logger for task run")
logger <- Logger(job.id, job.info, config.logBufferSize, logSink)
_ <- log.ftrace("Logger created, instantiating context")
ctx = create[F, A](job, arg, config, logger, store, blocker)
ctx = create[F, A](job, arg, config, logger, store, blocker)
} yield ctx
private final class ContextImpl[F[_]: Functor, A]( val args: A
, val logger: Logger[F]
, val store: Store[F]
, val blocker: Blocker
, val config: SchedulerConfig
, val jobId: Ident)
extends Context[F,A] {
final private class ContextImpl[F[_]: Functor, A](
val args: A,
val logger: Logger[F],
val store: Store[F],
val blocker: Blocker,
val config: SchedulerConfig,
val jobId: Ident
) extends Context[F, A] {
def setProgress(percent: Int): F[Unit] = {
val pval = math.min(100, math.max(0, percent))
store.transact(RJob.setProgress(jobId, pval)).map(_ => ())
}
def setProgress(percent: Int): F[Unit] = {
val pval = math.min(100, math.max(0, percent))
store.transact(RJob.setProgress(jobId, pval)).map(_ => ())
}
}
}
}

View File

@@ -11,14 +11,13 @@ import docspell.common.Priority
*/
case class CountingScheme(high: Int, low: Int, counter: Int = 0) {
def nextPriority: (CountingScheme, Priority) = {
def nextPriority: (CountingScheme, Priority) =
if (counter <= 0) (increment, Priority.High)
else {
val rest = counter % (high + low)
if (rest < high) (increment, Priority.High)
else (increment, Priority.Low)
}
}
def increment: CountingScheme =
copy(counter = counter + 1)
@@ -32,8 +31,7 @@ object CountingScheme {
def readString(str: String): Either[String, CountingScheme] =
str.split(',') match {
case Array(h, l) =>
Either.catchNonFatal(CountingScheme(h.toInt, l.toInt)).
left.map(_.getMessage)
Either.catchNonFatal(CountingScheme(h.toInt, l.toInt)).left.map(_.getMessage)
case _ =>
Left(s"Invalid counting scheme: $str")
}
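
As an aside, a minimal runnable sketch of the behaviour this class encodes, using simplified stand-ins for docspell's Priority and CountingScheme (the names here are illustrative, not part of the commit): with high = 2 and low = 1 the scheme hands out two high-priority slots for every low-priority slot, which is exactly what the CountingSchemeSpec further below expects.

object CountingSchemeSketch extends App {
  sealed trait Priority
  case object High extends Priority
  case object Low  extends Priority

  // Mirrors CountingScheme(high, low, counter): within each cycle of
  // (high + low) slots, the first `high` slots are High, the rest Low.
  final case class Scheme(high: Int, low: Int, counter: Int = 0) {
    def nextPriority: (Scheme, Priority) =
      if (counter <= 0) (increment, High)
      else {
        val rest = counter % (high + low)
        if (rest < high) (increment, High) else (increment, Low)
      }
    def increment: Scheme = copy(counter = counter + 1)
  }

  val six = List.iterate(Scheme(2, 1).nextPriority, 6)(_._1.nextPriority).map(_._2)
  println(six) // List(High, High, Low, High, High, Low)
}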

View File

@@ -20,13 +20,16 @@ case class JobTask[F[_]](name: Ident, task: Task[F, String, Unit], onCancel: Tas
object JobTask {
def json[F[_]: Sync, A](name: Ident, task: Task[F, A, Unit], onCancel: Task[F, A, Unit])
(implicit D: Decoder[A]): JobTask[F] = {
def json[F[_]: Sync, A](name: Ident, task: Task[F, A, Unit], onCancel: Task[F, A, Unit])(
implicit D: Decoder[A]
): JobTask[F] = {
val convert: String => F[A] =
str => str.parseJsonAs[A] match {
case Right(a) => a.pure[F]
case Left(ex) => Sync[F].raiseError(new Exception(s"Cannot parse task arguments: $str", ex))
}
str =>
str.parseJsonAs[A] match {
case Right(a) => a.pure[F]
case Left(ex) =>
Sync[F].raiseError(new Exception(s"Cannot parse task arguments: $str", ex))
}
JobTask(name, task.contramap(convert), onCancel.contramap(convert))
}
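
For illustration only, a sketch of the same decode-or-raise conversion written against plain circe (io.circe.parser.decode) instead of the project's parseJsonAs syntax helper; the Args case class is a hypothetical argument type, not something defined in docspell.

import cats.effect.{IO, Sync}
import cats.implicits._
import io.circe.Decoder
import io.circe.generic.semiauto.deriveDecoder
import io.circe.parser.decode

object JsonArgsSketch {
  // Hypothetical task arguments; any type with a circe Decoder works here.
  final case class Args(itemName: String, pages: Int)
  implicit val argsDecoder: Decoder[Args] = deriveDecoder

  // Same shape as the `convert` above: parse the stored argument string,
  // or raise an error naming the string that failed to parse.
  def convert[F[_]: Sync, A: Decoder](str: String): F[A] =
    decode[A](str) match {
      case Right(a) => a.pure[F]
      case Left(ex) =>
        Sync[F].raiseError(new Exception(s"Cannot parse task arguments: $str", ex))
    }

  def main(args: Array[String]): Unit =
    println(convert[IO, Args]("""{"itemName":"scan.pdf","pages":3}""").unsafeRunSync())
}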

View File

@@ -4,12 +4,14 @@ import cats.implicits._
import docspell.common._
import cats.effect.Sync
case class LogEvent( jobId: Ident
, jobInfo: String
, time: Timestamp
, level: LogLevel
, msg: String
, ex: Option[Throwable] = None) {
case class LogEvent(
jobId: Ident,
jobInfo: String,
time: Timestamp,
level: LogLevel,
msg: String,
ex: Option[Throwable] = None
) {
def logLine: String =
s">>> ${time.asString} $level $jobInfo: $msg"
@@ -21,5 +23,4 @@ object LogEvent {
def create[F[_]: Sync](jobId: Ident, jobInfo: String, level: LogLevel, msg: String): F[LogEvent] =
Timestamp.current[F].map(now => LogEvent(jobId, jobInfo, now, level, msg))
}

View File

@@ -44,12 +44,22 @@ object LogSink {
LogSink(_.evalMap(e => logInternal(e)))
def db[F[_]: Sync](store: Store[F]): LogSink[F] =
LogSink(_.evalMap(ev => for {
id <- Ident.randomId[F]
joblog = RJobLog(id, ev.jobId, ev.level, ev.time, ev.msg + ev.ex.map(th => ": "+ th.getMessage).getOrElse(""))
_ <- logInternal(ev)
_ <- store.transact(RJobLog.insert(joblog))
} yield ()))
LogSink(
_.evalMap(ev =>
for {
id <- Ident.randomId[F]
joblog = RJobLog(
id,
ev.jobId,
ev.level,
ev.time,
ev.msg + ev.ex.map(th => ": " + th.getMessage).getOrElse("")
)
_ <- logInternal(ev)
_ <- store.transact(RJobLog.insert(joblog))
} yield ()
)
)
def dbAndLog[F[_]: Concurrent](store: Store[F]): LogSink[F] = {
val s: Stream[F, Pipe[F, LogEvent, Unit]] =

View File

@@ -33,17 +33,25 @@ object Logger {
LogEvent.create[F](jobId, jobInfo, LogLevel.Warn, msg).flatMap(q.enqueue1)
def error(ex: Throwable)(msg: => String): F[Unit] =
LogEvent.create[F](jobId, jobInfo, LogLevel.Error, msg).map(le => le.copy(ex = Some(ex))).flatMap(q.enqueue1)
LogEvent
.create[F](jobId, jobInfo, LogLevel.Error, msg)
.map(le => le.copy(ex = Some(ex)))
.flatMap(q.enqueue1)
def error(msg: => String): F[Unit] =
LogEvent.create[F](jobId, jobInfo, LogLevel.Error, msg).flatMap(q.enqueue1)
}
def apply[F[_]: Concurrent](jobId: Ident, jobInfo: String, bufferSize: Int, sink: LogSink[F]): F[Logger[F]] =
def apply[F[_]: Concurrent](
jobId: Ident,
jobInfo: String,
bufferSize: Int,
sink: LogSink[F]
): F[Logger[F]] =
for {
q <- Queue.circularBuffer[F, LogEvent](bufferSize)
log = create(jobId, jobInfo, q)
_ <- Concurrent[F].start(q.dequeue.through(sink.receive).compile.drain)
q <- Queue.circularBuffer[F, LogEvent](bufferSize)
log = create(jobId, jobInfo, q)
_ <- Concurrent[F].start(q.dequeue.through(sink.receive).compile.drain)
} yield log
}
}
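
A self-contained sketch of the buffering pattern used here, assuming the same fs2 2.x / cats-effect 2.x APIs this codebase imports: events go into a bounded circular buffer and a forked fiber drains them through a sink, so a slow sink drops old events instead of blocking the producing task. The sink below is a stdout stand-in for LogSink.

import scala.concurrent.duration._
import cats.effect.{ExitCode, IO, IOApp}
import cats.implicits._
import fs2.Pipe
import fs2.concurrent.Queue

object BufferedLoggerSketch extends IOApp {
  // Stand-in for a LogSink: write each event to stdout.
  val sink: Pipe[IO, String, Unit] =
    _.evalMap(msg => IO(println(s">>> $msg")))

  def run(args: List[String]): IO[ExitCode] =
    for {
      q     <- Queue.circularBuffer[IO, String](8)          // bounded; oldest events are dropped when full
      drain <- q.dequeue.through(sink).compile.drain.start  // background fiber, as in Logger.apply
      _     <- List("starting", "working", "done").traverse_(q.enqueue1)
      _     <- IO.sleep(100.millis)                         // give the drain fiber a moment to flush
      _     <- drain.cancel
    } yield ExitCode.Success
}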

View File

@@ -7,13 +7,14 @@ import docspell.store.Store
import docspell.store.queue.JobQueue
import fs2.concurrent.SignallingRef
case class SchedulerBuilder[F[_]: ConcurrentEffect : ContextShift](
config: SchedulerConfig
, tasks: JobTaskRegistry[F]
, store: Store[F]
, blocker: Blocker
, queue: Resource[F, JobQueue[F]]
, logSink: LogSink[F]) {
case class SchedulerBuilder[F[_]: ConcurrentEffect: ContextShift](
config: SchedulerConfig,
tasks: JobTaskRegistry[F],
store: Store[F],
blocker: Blocker,
queue: Resource[F, JobQueue[F]],
logSink: LogSink[F]
) {
def withConfig(cfg: SchedulerConfig): SchedulerBuilder[F] =
copy(config = cfg)
@@ -33,7 +34,6 @@ case class SchedulerBuilder[F[_]: ConcurrentEffect : ContextShift](
def withLogSink(sink: LogSink[F]): SchedulerBuilder[F] =
copy(logSink = sink)
def serve: Resource[F, Scheduler[F]] =
resource.evalMap(sch => ConcurrentEffect[F].start(sch.start.compile.drain).map(_ => sch))
@@ -45,22 +45,25 @@ case class SchedulerBuilder[F[_]: ConcurrentEffect : ContextShift](
perms <- Resource.liftF(Semaphore(config.poolSize.toLong))
} yield new SchedulerImpl[F](config, blocker, jq, tasks, store, logSink, state, waiter, perms)
scheduler.evalTap(_.init).
map(s => s: Scheduler[F])
scheduler.evalTap(_.init).map(s => s: Scheduler[F])
}
}
object SchedulerBuilder {
def apply[F[_]: ConcurrentEffect : ContextShift]( config: SchedulerConfig
, blocker: Blocker
, store: Store[F]): SchedulerBuilder[F] =
new SchedulerBuilder[F](config
, JobTaskRegistry.empty[F]
, store
, blocker
, JobQueue(store)
, LogSink.db[F](store))
def apply[F[_]: ConcurrentEffect: ContextShift](
config: SchedulerConfig,
blocker: Blocker,
store: Store[F]
): SchedulerBuilder[F] =
new SchedulerBuilder[F](
config,
JobTaskRegistry.empty[F],
store,
blocker,
JobQueue(store),
LogSink.db[F](store)
)
}

View File

@@ -2,24 +2,26 @@ package docspell.joex.scheduler
import docspell.common._
case class SchedulerConfig( name: Ident
, poolSize: Int
, countingScheme: CountingScheme
, retries: Int
, retryDelay: Duration
, logBufferSize: Int
, wakeupPeriod: Duration
)
case class SchedulerConfig(
name: Ident,
poolSize: Int,
countingScheme: CountingScheme,
retries: Int,
retryDelay: Duration,
logBufferSize: Int,
wakeupPeriod: Duration
)
object SchedulerConfig {
val default = SchedulerConfig(
name = Ident.unsafe("default-scheduler")
, poolSize = 2 // math.max(2, Runtime.getRuntime.availableProcessors / 2)
, countingScheme = CountingScheme(2, 1)
, retries = 5
, retryDelay = Duration.seconds(30)
, logBufferSize = 500
, wakeupPeriod = Duration.minutes(10)
name = Ident.unsafe("default-scheduler"),
poolSize = 2 // math.max(2, Runtime.getRuntime.availableProcessors / 2)
,
countingScheme = CountingScheme(2, 1),
retries = 5,
retryDelay = Duration.seconds(30),
logBufferSize = 500,
wakeupPeriod = Duration.minutes(10)
)
}

View File

@@ -14,17 +14,19 @@ import SchedulerImpl._
import docspell.store.Store
import docspell.store.queries.QJob
final class SchedulerImpl[F[_]: ConcurrentEffect : ContextShift](val config: SchedulerConfig
, blocker: Blocker
, queue: JobQueue[F]
, tasks: JobTaskRegistry[F]
, store: Store[F]
, logSink: LogSink[F]
, state: SignallingRef[F, State[F]]
, waiter: SignallingRef[F, Boolean]
, permits: Semaphore[F]) extends Scheduler[F] {
final class SchedulerImpl[F[_]: ConcurrentEffect: ContextShift](
val config: SchedulerConfig,
blocker: Blocker,
queue: JobQueue[F],
tasks: JobTaskRegistry[F],
store: Store[F],
logSink: LogSink[F],
state: SignallingRef[F, State[F]],
waiter: SignallingRef[F, Boolean],
permits: Semaphore[F]
) extends Scheduler[F] {
private [this] val logger = getLogger
private[this] val logger = getLogger
/**
* On startup, get all jobs in state running from this scheduler
@@ -34,8 +36,13 @@ final class SchedulerImpl[F[_]: ConcurrentEffect : ContextShift](val config: Sch
QJob.runningToWaiting(config.name, store)
def periodicAwake(implicit T: Timer[F]): F[Fiber[F, Unit]] =
ConcurrentEffect[F].start(Stream.awakeEvery[F](config.wakeupPeriod.toScala).
evalMap(_ => logger.fdebug("Periodic awake reached") *> notifyChange).compile.drain)
ConcurrentEffect[F].start(
Stream
.awakeEvery[F](config.wakeupPeriod.toScala)
.evalMap(_ => logger.fdebug("Periodic awake reached") *> notifyChange)
.compile
.drain
)
def getRunning: F[Vector[RJob]] =
state.get.flatMap(s => QJob.findAll(s.getRunning, store))
@@ -43,7 +50,7 @@ final class SchedulerImpl[F[_]: ConcurrentEffect : ContextShift](val config: Sch
def requestCancel(jobId: Ident): F[Boolean] =
state.get.flatMap(_.cancelRequest(jobId) match {
case Some(ct) => ct.map(_ => true)
case None => logger.fwarn(s"Job ${jobId.id} not found, cannot cancel.").map(_ => false)
case None => logger.fwarn(s"Job ${jobId.id} not found, cannot cancel.").map(_ => false)
})
def notifyChange: F[Unit] =
@@ -51,59 +58,72 @@ final class SchedulerImpl[F[_]: ConcurrentEffect : ContextShift](val config: Sch
def shutdown(cancelAll: Boolean): F[Unit] = {
val doCancel =
state.get.
flatMap(_.cancelTokens.values.toList.traverse(identity)).
map(_ => ())
state.get.flatMap(_.cancelTokens.values.toList.traverse(identity)).map(_ => ())
val runShutdown =
state.modify(_.requestShutdown) *> (if (cancelAll) doCancel else ().pure[F])
val wait = Stream.eval(runShutdown).
evalMap(_ => logger.finfo("Scheduler is shutting down now.")).
flatMap(_ => Stream.eval(state.get) ++ Stream.suspend(state.discrete.takeWhile(_.getRunning.nonEmpty))).
flatMap(state => {
val wait = Stream
.eval(runShutdown)
.evalMap(_ => logger.finfo("Scheduler is shutting down now."))
.flatMap(_ =>
Stream.eval(state.get) ++ Stream.suspend(state.discrete.takeWhile(_.getRunning.nonEmpty))
)
.flatMap { state =>
if (state.getRunning.isEmpty) Stream.eval(logger.finfo("No jobs running."))
else Stream.eval(logger.finfo(s"Waiting for ${state.getRunning.size} jobs to finish.")) ++
Stream.emit(state)
})
else
Stream.eval(logger.finfo(s"Waiting for ${state.getRunning.size} jobs to finish.")) ++
Stream.emit(state)
}
(wait.drain ++ Stream.emit(())).compile.lastOrError
}
def start: Stream[F, Nothing] =
logger.sinfo("Starting scheduler") ++
mainLoop
mainLoop
def mainLoop: Stream[F, Nothing] = {
val body: F[Boolean] =
for {
_ <- permits.available.flatMap(a => logger.fdebug(s"Try to acquire permit ($a free)"))
_ <- permits.acquire
_ <- logger.fdebug("New permit acquired")
down <- state.get.map(_.shutdownRequest)
rjob <- if (down) logger.finfo("") *> permits.release *> (None: Option[RJob]).pure[F]
else queue.nextJob(group => state.modify(_.nextPrio(group, config.countingScheme)), config.name, config.retryDelay)
_ <- logger.fdebug(s"Next job found: ${rjob.map(_.info)}")
_ <- rjob.map(execute).getOrElse(permits.release)
_ <- permits.available.flatMap(a => logger.fdebug(s"Try to acquire permit ($a free)"))
_ <- permits.acquire
_ <- logger.fdebug("New permit acquired")
down <- state.get.map(_.shutdownRequest)
rjob <- if (down) logger.finfo("") *> permits.release *> (None: Option[RJob]).pure[F]
else
queue.nextJob(
group => state.modify(_.nextPrio(group, config.countingScheme)),
config.name,
config.retryDelay
)
_ <- logger.fdebug(s"Next job found: ${rjob.map(_.info)}")
_ <- rjob.map(execute).getOrElse(permits.release)
} yield rjob.isDefined
Stream.eval(state.get.map(_.shutdownRequest)).
evalTap(if (_) logger.finfo[F]("Stopping main loop due to shutdown request.") else ().pure[F]).
flatMap(if (_) Stream.empty else Stream.eval(body)).
flatMap({
Stream
.eval(state.get.map(_.shutdownRequest))
.evalTap(
if (_) logger.finfo[F]("Stopping main loop due to shutdown request.")
else ().pure[F]
)
.flatMap(if (_) Stream.empty else Stream.eval(body))
.flatMap({
case true =>
mainLoop
case false =>
logger.sdebug(s"Waiting for notify") ++
waiter.discrete.take(2).drain ++
logger.sdebug(s"Notify signal, going into main loop") ++
mainLoop
waiter.discrete.take(2).drain ++
logger.sdebug(s"Notify signal, going into main loop") ++
mainLoop
})
}
def execute(job: RJob): F[Unit] = {
val task = for {
jobtask <- tasks.find(job.task).toRight(s"This executor cannot run tasks with name: ${job.task}")
jobtask <- tasks
.find(job.task)
.toRight(s"This executor cannot run tasks with name: ${job.task}")
} yield jobtask
task match {
@@ -122,18 +142,25 @@ final class SchedulerImpl[F[_]: ConcurrentEffect : ContextShift](val config: Sch
def onFinish(job: RJob, finalState: JobState): F[Unit] =
for {
_ <- logger.fdebug(s"Job ${job.info} done $finalState. Releasing resources.")
_ <- permits.release *> permits.available.flatMap(a => logger.fdebug(s"Permit released ($a free)"))
_ <- state.modify(_.removeRunning(job))
_ <- QJob.setFinalState(job.id, finalState, store)
_ <- logger.fdebug(s"Job ${job.info} done $finalState. Releasing resources.")
_ <- permits.release *> permits.available.flatMap(a =>
logger.fdebug(s"Permit released ($a free)")
)
_ <- state.modify(_.removeRunning(job))
_ <- QJob.setFinalState(job.id, finalState, store)
} yield ()
def onStart(job: RJob): F[Unit] =
QJob.setRunning(job.id, config.name, store) //also increments retries if current state=stuck
def wrapTask(job: RJob, task: Task[F, String, Unit], ctx: Context[F, String]): Task[F, String, Unit] = {
task.mapF(fa => onStart(job) *> logger.fdebug("Starting task now") *> blocker.blockOn(fa)).
mapF(_.attempt.flatMap({
def wrapTask(
job: RJob,
task: Task[F, String, Unit],
ctx: Context[F, String]
): Task[F, String, Unit] =
task
.mapF(fa => onStart(job) *> logger.fdebug("Starting task now") *> blocker.blockOn(fa))
.mapF(_.attempt.flatMap({
case Right(()) =>
logger.info(s"Job execution successful: ${job.info}")
ctx.logger.info("Job execution successful") *>
@@ -148,16 +175,18 @@ final class SchedulerImpl[F[_]: ConcurrentEffect : ContextShift](val config: Sch
QJob.exceedsRetries(job.id, config.retries, store).flatMap {
case true =>
logger.error(ex)(s"Job ${job.info} execution failed. Retries exceeded.")
ctx.logger.error(ex)(s"Job ${job.info} execution failed. Retries exceeded.").
map(_ => JobState.Failed: JobState)
ctx.logger
.error(ex)(s"Job ${job.info} execution failed. Retries exceeded.")
.map(_ => JobState.Failed: JobState)
case false =>
logger.error(ex)(s"Job ${job.info} execution failed. Retrying later.")
ctx.logger.error(ex)(s"Job ${job.info} execution failed. Retrying later.").
map(_ => JobState.Stuck: JobState)
ctx.logger
.error(ex)(s"Job ${job.info} execution failed. Retrying later.")
.map(_ => JobState.Stuck: JobState)
}
}
})).
mapF(_.attempt.flatMap {
}))
.mapF(_.attempt.flatMap {
case Right(jstate) =>
onFinish(job, jstate)
case Left(ex) =>
@@ -165,14 +194,14 @@ final class SchedulerImpl[F[_]: ConcurrentEffect : ContextShift](val config: Sch
// we don't know the real outcome here
// since tasks should be idempotent, set it to stuck. if above has failed, this might fail anyways
onFinish(job, JobState.Stuck)
})
}
})
def forkRun(job: RJob, code: F[Unit], onCancel: F[Unit], ctx: Context[F, String]): F[F[Unit]] = {
val bfa = blocker.blockOn(code)
logger.fdebug(s"Forking job ${job.info}") *>
ConcurrentEffect[F].start(bfa).
map(fiber =>
ConcurrentEffect[F]
.start(bfa)
.map(fiber =>
logger.fdebug(s"Cancelling job ${job.info}") *>
fiber.cancel *>
onCancel.attempt.map({
@ -184,7 +213,8 @@ final class SchedulerImpl[F[_]: ConcurrentEffect : ContextShift](val config: Sch
state.modify(_.markCancelled(job)) *>
onFinish(job, JobState.Cancelled) *>
ctx.logger.warn("Job has been cancelled.") *>
logger.fdebug(s"Job ${job.info} has been cancelled."))
logger.fdebug(s"Job ${job.info} has been cancelled.")
)
}
}
@@ -193,10 +223,12 @@ object SchedulerImpl {
def emptyState[F[_]]: State[F] =
State(Map.empty, Set.empty, Map.empty, false)
case class State[F[_]]( counters: Map[Ident, CountingScheme]
, cancelled: Set[Ident]
, cancelTokens: Map[Ident, CancelToken[F]]
, shutdownRequest: Boolean) {
case class State[F[_]](
counters: Map[Ident, CountingScheme],
cancelled: Set[Ident],
cancelTokens: Map[Ident, CancelToken[F]],
shutdownRequest: Boolean
) {
def nextPrio(group: Ident, initial: CountingScheme): (State[F], Priority) = {
val (cs, prio) = counters.getOrElse(group, initial).nextPriority

View File

@@ -24,11 +24,11 @@ trait Task[F[_], A, B] {
def mapF[C](f: F[B] => F[C]): Task[F, A, C] =
Task(Task.toKleisli(this).mapF(f))
def attempt(implicit F: ApplicativeError[F,Throwable]): Task[F, A, Either[Throwable, B]] =
def attempt(implicit F: ApplicativeError[F, Throwable]): Task[F, A, Either[Throwable, B]] =
mapF(_.attempt)
def contramap[C](f: C => F[A])(implicit F: FlatMap[F]): Task[F, C, B] = {
ctxc: Context[F, C] => f(ctxc.args).flatMap(a => run(ctxc.map(_ => a)))
def contramap[C](f: C => F[A])(implicit F: FlatMap[F]): Task[F, C, B] = { ctxc: Context[F, C] =>
f(ctxc.args).flatMap(a => run(ctxc.map(_ => a)))
}
}
@@ -46,7 +46,6 @@ object Task {
def apply[F[_], A, B](k: Kleisli[F, Context[F, A], B]): Task[F, A, B] =
c => k.run(c)
def toKleisli[F[_], A, B](t: Task[F, A, B]): Kleisli[F, Context[F, A], B] =
Kleisli(t.run)

View File

@@ -6,8 +6,8 @@ import minitest.SimpleTestSuite
object CountingSchemeSpec extends SimpleTestSuite {
test("counting") {
val cs = CountingScheme(2,1)
val list = List.iterate(cs.nextPriority, 6)(_._1.nextPriority).map(_._2)
val cs = CountingScheme(2, 1)
val list = List.iterate(cs.nextPriority, 6)(_._1.nextPriority).map(_._2)
val expect = List(Priority.High, Priority.High, Priority.Low)
assertEquals(list, expect ++ expect)
}

View File

@@ -7,27 +7,37 @@ import docspell.backend.{Config => BackendConfig}
import docspell.common._
import scodec.bits.ByteVector
case class Config(appName: String
, appId: Ident
, baseUrl: LenientUri
, bind: Config.Bind
, backend: BackendConfig
, auth: Login.Config
case class Config(
appName: String,
appId: Ident,
baseUrl: LenientUri,
bind: Config.Bind,
backend: BackendConfig,
auth: Login.Config
)
object Config {
val postgres = JdbcConfig(LenientUri.unsafe("jdbc:postgresql://localhost:5432/docspelldev"), "dev", "dev")
val h2 = JdbcConfig(LenientUri.unsafe("jdbc:h2:./target/docspelldev.db;MODE=PostgreSQL;DATABASE_TO_LOWER=TRUE"), "sa", "")
val postgres =
JdbcConfig(LenientUri.unsafe("jdbc:postgresql://localhost:5432/docspelldev"), "dev", "dev")
val h2 = JdbcConfig(
LenientUri.unsafe("jdbc:h2:./target/docspelldev.db;MODE=PostgreSQL;DATABASE_TO_LOWER=TRUE"),
"sa",
""
)
val default: Config =
Config("Docspell"
, Ident.unsafe("restserver1")
, LenientUri.unsafe("http://localhost:7880")
, Config.Bind("localhost", 7880)
, BackendConfig(postgres
, SignupConfig(SignupConfig.invite, Password("testpass"), Duration.hours(5 * 24))
, BackendConfig.Files(512 * 1024, List(MimeType.pdf)))
, Login.Config(ByteVector.fromValidHex("caffee"), Duration.minutes(2)))
Config(
"Docspell",
Ident.unsafe("restserver1"),
LenientUri.unsafe("http://localhost:7880"),
Config.Bind("localhost", 7880),
BackendConfig(
postgres,
SignupConfig(SignupConfig.invite, Password("testpass"), Duration.hours(5 * 24)),
BackendConfig.Files(512 * 1024, List(MimeType.pdf))
),
Login.Config(ByteVector.fromValidHex("caffee"), Duration.minutes(2))
)
case class Bind(address: String, port: Int)
}

View File

@@ -12,7 +12,7 @@ object ConfigFile {
ConfigSource.default.at("docspell.server").loadOrThrow[Config]
object Implicits {
implicit val signupModeReader: ConfigReader[SignupConfig.Mode] =
implicit val signupModeReader: ConfigReader[SignupConfig.Mode] =
ConfigReader[String].emap(reason(SignupConfig.Mode.fromString))
}
}

View File

@@ -13,12 +13,14 @@ import org.log4s._
object Main extends IOApp {
private[this] val logger = getLogger
val blockingEc: ExecutionContext = ExecutionContext.fromExecutor(Executors.newCachedThreadPool(
ThreadFactories.ofName("docspell-restserver-blocking")))
val blockingEc: ExecutionContext = ExecutionContext.fromExecutor(
Executors.newCachedThreadPool(ThreadFactories.ofName("docspell-restserver-blocking"))
)
val blocker = Blocker.liftExecutionContext(blockingEc)
val connectEC: ExecutionContext = ExecutionContext.fromExecutorService(Executors.newFixedThreadPool(5,
ThreadFactories.ofName("docspell-dbconnect")))
val connectEC: ExecutionContext = ExecutionContext.fromExecutorService(
Executors.newFixedThreadPool(5, ThreadFactories.ofName("docspell-dbconnect"))
)
def run(args: List[String]) = {
args match {
@@ -41,12 +43,15 @@ object Main extends IOApp {
}
val cfg = ConfigFile.loadConfig
val banner = Banner("REST Server"
, BuildInfo.version
, BuildInfo.gitHeadCommit
, cfg.backend.jdbc.url
, Option(System.getProperty("config.file"))
, cfg.appId, cfg.baseUrl)
val banner = Banner(
"REST Server",
BuildInfo.version,
BuildInfo.gitHeadCommit,
cfg.backend.jdbc.url,
Option(System.getProperty("config.file")),
cfg.appId,
cfg.baseUrl
)
logger.info(s"\n${banner.render("***>")}")
RestServer.stream[IO](cfg, connectEC, blockingEc, blocker).compile.drain.as(ExitCode.Success)
}

View File

@@ -7,7 +7,8 @@ import docspell.common.NodeType
import scala.concurrent.ExecutionContext
final class RestAppImpl[F[_]: Sync](val config: Config, val backend: BackendApp[F]) extends RestApp[F] {
final class RestAppImpl[F[_]: Sync](val config: Config, val backend: BackendApp[F])
extends RestApp[F] {
def init: F[Unit] =
backend.node.register(config.appId, NodeType.Restserver, config.baseUrl)
@@ -18,11 +19,16 @@ final class RestAppImpl[F[_]: Sync](val config: Config, val backend: BackendApp[
object RestAppImpl {
def create[F[_]: ConcurrentEffect: ContextShift](cfg: Config, connectEC: ExecutionContext, httpClientEc: ExecutionContext, blocker: Blocker): Resource[F, RestApp[F]] =
def create[F[_]: ConcurrentEffect: ContextShift](
cfg: Config,
connectEC: ExecutionContext,
httpClientEc: ExecutionContext,
blocker: Blocker
): Resource[F, RestApp[F]] =
for {
backend <- BackendApp(cfg.backend, connectEC, httpClientEc, blocker)
app = new RestAppImpl[F](cfg, backend)
appR <- Resource.make(app.init.map(_ => app))(_.shutdown)
backend <- BackendApp(cfg.backend, connectEC, httpClientEc, blocker)
app = new RestAppImpl[F](cfg, backend)
appR <- Resource.make(app.init.map(_ => app))(_.shutdown)
} yield appR
}

View File

@@ -15,54 +15,64 @@ import scala.concurrent.ExecutionContext
object RestServer {
def stream[F[_]: ConcurrentEffect](cfg: Config, connectEC: ExecutionContext, httpClientEc: ExecutionContext, blocker: Blocker)
(implicit T: Timer[F], CS: ContextShift[F]): Stream[F, Nothing] = {
def stream[F[_]: ConcurrentEffect](
cfg: Config,
connectEC: ExecutionContext,
httpClientEc: ExecutionContext,
blocker: Blocker
)(implicit T: Timer[F], CS: ContextShift[F]): Stream[F, Nothing] = {
val app = for {
restApp <- RestAppImpl.create[F](cfg, connectEC, httpClientEc, blocker)
restApp <- RestAppImpl.create[F](cfg, connectEC, httpClientEc, blocker)
httpApp = Router(
"/api/info" -> routes.InfoRoutes(),
"/api/info" -> routes.InfoRoutes(),
"/api/v1/open/" -> openRoutes(cfg, restApp),
"/api/v1/sec/" -> Authenticate(restApp.backend.login, cfg.auth) {
token => securedRoutes(cfg, restApp, token)
"/api/v1/sec/" -> Authenticate(restApp.backend.login, cfg.auth) { token =>
securedRoutes(cfg, restApp, token)
},
"/app/assets" -> WebjarRoutes.appRoutes[F](blocker),
"/app" -> TemplateRoutes[F](blocker, cfg)
"/app" -> TemplateRoutes[F](blocker, cfg)
).orNotFound
finalHttpApp = Logger.httpApp(logHeaders = false, logBody = false)(httpApp)
} yield finalHttpApp
Stream.resource(app).flatMap(httpApp =>
BlazeServerBuilder[F].
bindHttp(cfg.bind.port, cfg.bind.address).
withHttpApp(httpApp).
withoutBanner.
serve)
Stream
.resource(app)
.flatMap(httpApp =>
BlazeServerBuilder[F]
.bindHttp(cfg.bind.port, cfg.bind.address)
.withHttpApp(httpApp)
.withoutBanner
.serve
)
}.drain
def securedRoutes[F[_]: Effect](cfg: Config, restApp: RestApp[F], token: AuthToken): HttpRoutes[F] =
def securedRoutes[F[_]: Effect](
cfg: Config,
restApp: RestApp[F],
token: AuthToken
): HttpRoutes[F] =
Router(
"auth" -> LoginRoutes.session(restApp.backend.login, cfg),
"tag" -> TagRoutes(restApp.backend, token),
"equipment" -> EquipmentRoutes(restApp.backend, token),
"auth" -> LoginRoutes.session(restApp.backend.login, cfg),
"tag" -> TagRoutes(restApp.backend, token),
"equipment" -> EquipmentRoutes(restApp.backend, token),
"organization" -> OrganizationRoutes(restApp.backend, token),
"person" -> PersonRoutes(restApp.backend, token),
"source" -> SourceRoutes(restApp.backend, token),
"user" -> UserRoutes(restApp.backend, token),
"collective" -> CollectiveRoutes(restApp.backend, token),
"queue" -> JobQueueRoutes(restApp.backend, token),
"item" -> ItemRoutes(restApp.backend, token),
"attachment" -> AttachmentRoutes(restApp.backend, token),
"upload" -> UploadRoutes.secured(restApp.backend, cfg, token)
"person" -> PersonRoutes(restApp.backend, token),
"source" -> SourceRoutes(restApp.backend, token),
"user" -> UserRoutes(restApp.backend, token),
"collective" -> CollectiveRoutes(restApp.backend, token),
"queue" -> JobQueueRoutes(restApp.backend, token),
"item" -> ItemRoutes(restApp.backend, token),
"attachment" -> AttachmentRoutes(restApp.backend, token),
"upload" -> UploadRoutes.secured(restApp.backend, cfg, token)
)
def openRoutes[F[_]: Effect](cfg: Config, restApp: RestApp[F]): HttpRoutes[F] =
Router(
"auth" -> LoginRoutes.login(restApp.backend.login, cfg),
"auth" -> LoginRoutes.login(restApp.backend.login, cfg),
"signup" -> RegisterRoutes(restApp.backend, cfg),
"upload" -> UploadRoutes.open(restApp.backend, cfg)
)

View File

@@ -8,13 +8,20 @@ import docspell.restserver.Config
case class CookieData(auth: AuthToken) {
def accountId: AccountId = auth.account
def asString: String = auth.asString
def asString: String = auth.asString
def asCookie(cfg: Config): ResponseCookie = {
val domain = cfg.baseUrl.host
val sec = cfg.baseUrl.scheme.exists(_.endsWith("s"))
val path = cfg.baseUrl.path/"api"/"v1"/"sec"
ResponseCookie(CookieData.cookieName, asString, domain = domain, path = Some(path.asString), httpOnly = true, secure = sec)
val sec = cfg.baseUrl.scheme.exists(_.endsWith("s"))
val path = cfg.baseUrl.path / "api" / "v1" / "sec"
ResponseCookie(
CookieData.cookieName,
asString,
domain = domain,
path = Some(path.asString),
httpOnly = true,
secure = sec
)
}
}
object CookieData {
@@ -22,18 +29,21 @@ object CookieData {
val headerName = "X-Docspell-Auth"
def authenticator[F[_]](r: Request[F]): Either[String, String] =
fromCookie(r) orElse fromHeader(r)
fromCookie(r).orElse(fromHeader(r))
def fromCookie[F[_]](req: Request[F]): Either[String, String] = {
def fromCookie[F[_]](req: Request[F]): Either[String, String] =
for {
header <- headers.Cookie.from(req.headers).toRight("Cookie parsing error")
cookie <- header.values.toList.find(_.name == cookieName).toRight("Couldn't find the authcookie")
header <- headers.Cookie.from(req.headers).toRight("Cookie parsing error")
cookie <- header.values.toList
.find(_.name == cookieName)
.toRight("Couldn't find the authcookie")
} yield cookie.content
}
def fromHeader[F[_]](req: Request[F]): Either[String, String] = {
req.headers.get(CaseInsensitiveString(headerName)).map(_.value).toRight("Couldn't find an authenticator")
}
def fromHeader[F[_]](req: Request[F]): Either[String, String] =
req.headers
.get(CaseInsensitiveString(headerName))
.map(_.value)
.toRight("Couldn't find an authenticator")
def deleteCookie(cfg: Config): ResponseCookie =
ResponseCookie(

View File

@@ -24,31 +24,37 @@ trait Conversions {
// insights
def mkItemInsights(d: InsightData): ItemInsights =
ItemInsights(d.incoming, d.outgoing, d.bytes, TagCloud(d.tags.toList.map(p => NameCount(p._1, p._2))))
ItemInsights(
d.incoming,
d.outgoing,
d.bytes,
TagCloud(d.tags.toList.map(p => NameCount(p._1, p._2)))
)
// attachment meta
def mkAttachmentMeta(rm: RAttachmentMeta): AttachmentMeta =
AttachmentMeta(rm.content.getOrElse("")
, rm.nerlabels.map(nl => Label(nl.tag, nl.label, nl.startPosition, nl.endPosition))
, mkItemProposals(rm.proposals))
AttachmentMeta(
rm.content.getOrElse(""),
rm.nerlabels.map(nl => Label(nl.tag, nl.label, nl.startPosition, nl.endPosition)),
mkItemProposals(rm.proposals)
)
// item proposal
def mkItemProposals(ml: MetaProposalList): ItemProposals = {
def get(mpt: MetaProposalType) =
ml.find(mpt).
map(mp => mp.values.toList.map(_.ref).map(mkIdName)).
getOrElse(Nil)
ml.find(mpt).map(mp => mp.values.toList.map(_.ref).map(mkIdName)).getOrElse(Nil)
def getDates(mpt: MetaProposalType): List[Timestamp] =
ml.find(mpt).
map(mp => mp.values.toList.
map(cand => cand.ref.id.id).
flatMap(str => Either.catchNonFatal(LocalDate.parse(str)).toOption).
map(_.atTime(12, 0).atZone(ZoneId.of("GMT"))).
map(zdt => Timestamp(zdt.toInstant))).
getOrElse(Nil).
distinct.
take(5)
ml.find(mpt)
.map(mp =>
mp.values.toList
.map(cand => cand.ref.id.id)
.flatMap(str => Either.catchNonFatal(LocalDate.parse(str)).toOption)
.map(_.atTime(12, 0).atZone(ZoneId.of("GMT")))
.map(zdt => Timestamp(zdt.toInstant))
)
.getOrElse(Nil)
.distinct
.take(5)
ItemProposals(
corrOrg = get(MetaProposalType.CorrOrg),
@@ -62,23 +68,25 @@ trait Conversions {
// item detail
def mkItemDetail(data: OItem.ItemData): ItemDetail =
ItemDetail(data.item.id
, data.item.direction
, data.item.name
, data.item.source
, data.item.state
, data.item.created
, data.item.updated
, data.item.itemDate
, data.corrOrg.map(o => IdName(o.oid, o.name))
, data.corrPerson.map(p => IdName(p.pid, p.name))
, data.concPerson.map(p => IdName(p.pid, p.name))
, data.concEquip.map(e => IdName(e.eid, e.name))
, data.inReplyTo.map(mkIdName)
, data.item.dueDate
, data.item.notes
, data.attachments.map((mkAttachment _).tupled).toList
, data.tags.map(mkTag).toList)
ItemDetail(
data.item.id,
data.item.direction,
data.item.name,
data.item.source,
data.item.state,
data.item.created,
data.item.updated,
data.item.itemDate,
data.corrOrg.map(o => IdName(o.oid, o.name)),
data.corrPerson.map(p => IdName(p.pid, p.name)),
data.concPerson.map(p => IdName(p.pid, p.name)),
data.concEquip.map(e => IdName(e.eid, e.name)),
data.inReplyTo.map(mkIdName),
data.item.dueDate,
data.item.notes,
data.attachments.map((mkAttachment _).tupled).toList,
data.tags.map(mkTag).toList
)
def mkAttachment(ra: RAttachment, m: FileMeta): Attachment =
Attachment(ra.id, ra.name, m.length, MimeType.unsafe(m.mimetype.asString))
@@ -86,20 +94,21 @@ trait Conversions {
// item list
def mkQuery(m: ItemSearch, coll: Ident): OItem.Query =
OItem.Query(coll
, m.name
, if (m.inbox) Seq(ItemState.Created) else Seq(ItemState.Created, ItemState.Confirmed)
, m.direction
, m.corrPerson
, m.corrOrg
, m.concPerson
, m.concEquip
, m.tagsInclude.map(Ident.unsafe)
, m.tagsExclude.map(Ident.unsafe)
, m.dateFrom
, m.dateUntil
, m.dueDateFrom
, m.dueDateUntil
OItem.Query(
coll,
m.name,
if (m.inbox) Seq(ItemState.Created) else Seq(ItemState.Created, ItemState.Confirmed),
m.direction,
m.corrPerson,
m.corrOrg,
m.concPerson,
m.concEquip,
m.tagsInclude.map(Ident.unsafe),
m.tagsExclude.map(Ident.unsafe),
m.dateFrom,
m.dateUntil,
m.dueDateFrom,
m.dueDateUntil
)
def mkItemList(v: Vector[OItem.ListItem]): ItemLightList = {
@@ -113,8 +122,20 @@ trait Conversions {
}
def mkItemLight(i: OItem.ListItem): ItemLight =
ItemLight(i.id, i.name, i.state, i.date, i.dueDate, i.source, i.direction.name.some, i.corrOrg.map(mkIdName),
i.corrPerson.map(mkIdName), i.concPerson.map(mkIdName), i.concEquip.map(mkIdName), i.fileCount)
ItemLight(
i.id,
i.name,
i.state,
i.date,
i.dueDate,
i.source,
i.direction.name.some,
i.corrOrg.map(mkIdName),
i.corrPerson.map(mkIdName),
i.concPerson.map(mkIdName),
i.concEquip.map(mkIdName),
i.fileCount
)
// job
def mkJobQueueState(state: OJob.CollectiveQueueState): JobQueueState = {
@@ -128,46 +149,57 @@ trait Conversions {
val t2 = f(j2).getOrElse(Timestamp.Epoch)
t1.value.isBefore(t2.value)
}
JobQueueState(state.running.map(mkJobDetail).toList.sortWith(asc(_.started))
, state.done.map(mkJobDetail).toList.sortWith(desc(_.finished))
, state.queued.map(mkJobDetail).toList.sortWith(asc(_.submitted.some)))
JobQueueState(
state.running.map(mkJobDetail).toList.sortWith(asc(_.started)),
state.done.map(mkJobDetail).toList.sortWith(desc(_.finished)),
state.queued.map(mkJobDetail).toList.sortWith(asc(_.submitted.some))
)
}
def mkJobDetail(jd: OJob.JobDetail): JobDetail =
JobDetail(jd.job.id
, jd.job.subject
, jd.job.submitted
, jd.job.priority
, jd.job.state
, jd.job.retries
, jd.logs.map(mkJobLog).toList
, jd.job.progress
, jd.job.worker
, jd.job.started
, jd.job.finished)
JobDetail(
jd.job.id,
jd.job.subject,
jd.job.submitted,
jd.job.priority,
jd.job.state,
jd.job.retries,
jd.logs.map(mkJobLog).toList,
jd.job.progress,
jd.job.worker,
jd.job.started,
jd.job.finished
)
def mkJobLog(jl: RJobLog): JobLogEvent =
JobLogEvent(jl.created, jl.level, jl.message)
// upload
def readMultipart[F[_]: Effect](mp: Multipart[F], logger: Logger, prio: Priority, validFileTypes: Seq[MimeType]): F[UploadData[F]] = {
def parseMeta(body: Stream[F, Byte]): F[ItemUploadMeta] = {
body.through(fs2.text.utf8Decode).
parseJsonAs[ItemUploadMeta].
map(_.fold(ex => {
def readMultipart[F[_]: Effect](
mp: Multipart[F],
logger: Logger,
prio: Priority,
validFileTypes: Seq[MimeType]
): F[UploadData[F]] = {
def parseMeta(body: Stream[F, Byte]): F[ItemUploadMeta] =
body
.through(fs2.text.utf8Decode)
.parseJsonAs[ItemUploadMeta]
.map(_.fold(ex => {
logger.error(ex)("Reading upload metadata failed.")
throw ex
}, identity))
}
val meta: F[(Boolean, UploadMeta)] = mp.parts.find(_.name.exists(_ equalsIgnoreCase "meta")).
map(p => parseMeta(p.body)).
map(fm => fm.map(m => (m.multiple, UploadMeta(m.direction, "webapp", validFileTypes)))).
getOrElse((true, UploadMeta(None, "webapp", validFileTypes)).pure[F])
val meta: F[(Boolean, UploadMeta)] = mp.parts
.find(_.name.exists(_.equalsIgnoreCase("meta")))
.map(p => parseMeta(p.body))
.map(fm => fm.map(m => (m.multiple, UploadMeta(m.direction, "webapp", validFileTypes))))
.getOrElse((true, UploadMeta(None, "webapp", validFileTypes)).pure[F])
val files = mp.parts.
filter(p => p.name.forall(s => !s.equalsIgnoreCase("meta"))).
map(p => OUpload.File(p.filename, p.headers.get(`Content-Type`).map(fromContentType), p.body))
val files = mp.parts
.filter(p => p.name.forall(s => !s.equalsIgnoreCase("meta")))
.map(p => OUpload.File(p.filename, p.headers.get(`Content-Type`).map(fromContentType), p.body)
)
for {
metaData <- meta
_ <- Effect[F].delay(logger.debug(s"Parsed upload meta data: $metaData"))
@ -178,8 +210,14 @@ trait Conversions {
// organization and person
def mkOrg(v: OOrganization.OrgAndContacts): Organization = {
val ro = v.org
Organization(ro.oid, ro.name, Address(ro.street, ro.zip, ro.city, ro.country),
v.contacts.map(mkContact).toList, ro.notes, ro.created)
Organization(
ro.oid,
ro.name,
Address(ro.street, ro.zip, ro.city, ro.country),
v.contacts.map(mkContact).toList,
ro.notes,
ro.created
)
}
def newOrg[F[_]: Sync](v: Organization, cid: Ident): F[OOrganization.OrgAndContacts] = {
@ -189,7 +227,17 @@ trait Conversions {
now <- Timestamp.current[F]
oid <- Ident.randomId[F]
cont <- contacts(oid)
org = ROrganization(oid, cid, v.name, v.address.street, v.address.zip, v.address.city, v.address.country, v.notes, now)
org = ROrganization(
oid,
cid,
v.name,
v.address.street,
v.address.zip,
v.address.city,
v.address.country,
v.notes,
now
)
} yield OOrganization.OrgAndContacts(org, cont)
}
@ -197,15 +245,32 @@ trait Conversions {
def contacts(oid: Ident) =
v.contacts.traverse(c => newContact(c, oid.some, None))
for {
cont <- contacts(v.id)
org = ROrganization(v.id, cid, v.name, v.address.street, v.address.zip, v.address.city, v.address.country, v.notes, v.created)
cont <- contacts(v.id)
org = ROrganization(
v.id,
cid,
v.name,
v.address.street,
v.address.zip,
v.address.city,
v.address.country,
v.notes,
v.created
)
} yield OOrganization.OrgAndContacts(org, cont)
}
def mkPerson(v: OOrganization.PersonAndContacts): Person = {
val ro = v.person
Person(ro.pid, ro.name, Address(ro.street, ro.zip, ro.city, ro.country),
v.contacts.map(mkContact).toList, ro.notes, ro.concerning, ro.created)
Person(
ro.pid,
ro.name,
Address(ro.street, ro.zip, ro.city, ro.country),
v.contacts.map(mkContact).toList,
ro.notes,
ro.concerning,
ro.created
)
}
def newPerson[F[_]: Sync](v: Person, cid: Ident): F[OOrganization.PersonAndContacts] = {
@ -215,7 +280,18 @@ trait Conversions {
now <- Timestamp.current[F]
pid <- Ident.randomId[F]
cont <- contacts(pid)
org = RPerson(pid, cid, v.name, v.address.street, v.address.zip, v.address.city, v.address.country, v.notes, v.concerning, now)
org = RPerson(
pid,
cid,
v.name,
v.address.street,
v.address.zip,
v.address.city,
v.address.country,
v.notes,
v.concerning,
now
)
} yield OOrganization.PersonAndContacts(org, cont)
}
@ -223,8 +299,19 @@ trait Conversions {
def contacts(pid: Ident) =
v.contacts.traverse(c => newContact(c, None, pid.some))
for {
cont <- contacts(v.id)
org = RPerson(v.id, cid, v.name, v.address.street, v.address.zip, v.address.city, v.address.country, v.notes, v.concerning, v.created)
cont <- contacts(v.id)
org = RPerson(
v.id,
cid,
v.name,
v.address.street,
v.address.zip,
v.address.city,
v.address.country,
v.notes,
v.concerning,
v.created
)
} yield OOrganization.PersonAndContacts(org, cont)
}
@ -233,7 +320,8 @@ trait Conversions {
Contact(rc.contactId, rc.value, rc.kind)
def newContact[F[_]: Sync](c: Contact, oid: Option[Ident], pid: Option[Ident]): F[RContact] =
timeId.map { case (id, now) =>
timeId.map {
case (id, now) =>
RContact(id, c.value, c.kind, pid, oid, now)
}
@ -242,12 +330,33 @@ trait Conversions {
User(ru.login, ru.state, None, ru.email, ru.lastLogin, ru.loginCount, ru.created)
def newUser[F[_]: Sync](u: User, cid: Ident): F[RUser] =
timeId.map { case (id, now) =>
RUser(id, u.login, cid, u.password.getOrElse(Password.empty), u.state, u.email, 0, None, now)
timeId.map {
case (id, now) =>
RUser(
id,
u.login,
cid,
u.password.getOrElse(Password.empty),
u.state,
u.email,
0,
None,
now
)
}
def changeUser(u: User, cid: Ident): RUser =
RUser(Ident.unsafe(""), u.login, cid, u.password.getOrElse(Password.empty), u.state, u.email, u.loginCount, u.lastLogin, u.created)
RUser(
Ident.unsafe(""),
u.login,
cid,
u.password.getOrElse(Password.empty),
u.state,
u.email,
u.loginCount,
u.lastLogin,
u.created
)
// tags
@ -255,34 +364,36 @@ trait Conversions {
Tag(rt.tagId, rt.name, rt.category, rt.created)
def newTag[F[_]: Sync](t: Tag, cid: Ident): F[RTag] =
timeId.map { case (id, now) =>
RTag(id, cid, t.name, t.category, now)
timeId.map {
case (id, now) =>
RTag(id, cid, t.name, t.category, now)
}
def changeTag(t: Tag, cid: Ident): RTag =
RTag(t.id, cid, t.name, t.category, t.created)
// sources
def mkSource(s: RSource): Source =
Source(s.sid, s.abbrev, s.description, s.counter, s.enabled, s.priority, s.created)
def newSource[F[_]: Sync](s: Source, cid: Ident): F[RSource] =
timeId.map({ case (id, now) =>
RSource(id, cid, s.abbrev, s.description, 0, s.enabled, s.priority, now)
timeId.map({
case (id, now) =>
RSource(id, cid, s.abbrev, s.description, 0, s.enabled, s.priority, now)
})
def changeSource[F[_]: Sync](s: Source, coll: Ident): RSource =
RSource(s.id, coll, s.abbrev, s.description, s.counter, s.enabled, s.priority, s.created)
RSource(s.id, coll, s.abbrev, s.description, s.counter, s.enabled, s.priority, s.created)
// equipment
def mkEquipment(re: REquipment): Equipment =
Equipment(re.eid, re.name, re.created)
def newEquipment[F[_]: Sync](e: Equipment, cid: Ident): F[REquipment] =
timeId.map({ case (id, now) =>
REquipment(id, cid, e.name, now)
timeId.map({
case (id, now) =>
REquipment(id, cid, e.name, now)
})
def changeEquipment(e: Equipment, cid: Ident): REquipment =
@ -298,26 +409,28 @@ trait Conversions {
def basicResult(cr: JobCancelResult): BasicResult =
cr match {
case JobCancelResult.JobNotFound => BasicResult(false, "Job not found")
case JobCancelResult.CancelRequested => BasicResult(true, "Cancel was requested at the job executor")
case JobCancelResult.CancelRequested =>
BasicResult(true, "Cancel was requested at the job executor")
case JobCancelResult.Removed => BasicResult(true, "The job has been removed from the queue.")
}
def basicResult(ar: AddResult, successMsg: String): BasicResult = ar match {
case AddResult.Success => BasicResult(true, successMsg)
case AddResult.Success => BasicResult(true, successMsg)
case AddResult.EntityExists(msg) => BasicResult(false, msg)
case AddResult.Failure(ex) => BasicResult(false, s"Internal error: ${ex.getMessage}")
case AddResult.Failure(ex) => BasicResult(false, s"Internal error: ${ex.getMessage}")
}
def basicResult(ur: OUpload.UploadResult): BasicResult = ur match {
case UploadResult.Success => BasicResult(true, "Files submitted.")
case UploadResult.NoFiles => BasicResult(false, "There were no files to submit.")
case UploadResult.Success => BasicResult(true, "Files submitted.")
case UploadResult.NoFiles => BasicResult(false, "There were no files to submit.")
case UploadResult.NoSource => BasicResult(false, "The source id is not valid.")
}
def basicResult(cr: PassChangeResult): BasicResult = cr match {
case PassChangeResult.Success => BasicResult(true, "Password changed.")
case PassChangeResult.Success => BasicResult(true, "Password changed.")
case PassChangeResult.UpdateFailed => BasicResult(false, "The database update failed.")
case PassChangeResult.PasswordMismatch => BasicResult(false, "The current password is incorrect.")
case PassChangeResult.PasswordMismatch =>
BasicResult(false, "The current password is incorrect.")
case PassChangeResult.UserNotFound => BasicResult(false, "User not found.")
}
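
The newContact, newUser, newTag, newSource and newEquipment constructors above all destructure a timeId value into a fresh id and a creation timestamp. The helper itself is not part of this diff; a minimal sketch of what it presumably looks like, built only from the Ident.randomId and Timestamp.current calls already visible in newOrg and newPerson (signature and import paths are assumptions):

// Hedged reconstruction of the timeId helper used by the constructors above; its actual
// definition is outside this diff, so treat the signature and imports as assumptions.
import cats.effect.Sync
import cats.implicits._
import docspell.common.{Ident, Timestamp}

def timeId[F[_]: Sync]: F[(Ident, Timestamp)] =
  for {
    id  <- Ident.randomId[F]     // fresh random identifier for the new row
    now <- Timestamp.current[F]  // creation time stored alongside it
  } yield (id, now)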

View File

@ -8,28 +8,26 @@ import org.http4s.dsl.Http4sDsl
trait ResponseGenerator[F[_]] {
self: Http4sDsl[F] =>
implicit final class EitherResponses[A,B](e: Either[A, B]) {
def toResponse(headers: Header*)
(implicit F: Applicative[F]
, w0: EntityEncoder[F, A]
, w1: EntityEncoder[F, B]): F[Response[F]] =
implicit final class EitherResponses[A, B](e: Either[A, B]) {
def toResponse(headers: Header*)(
implicit F: Applicative[F],
w0: EntityEncoder[F, A],
w1: EntityEncoder[F, B]
): F[Response[F]] =
e.fold(
a => UnprocessableEntity(a),
b => Ok(b)
).map(_.withHeaders(headers: _*))
a => UnprocessableEntity(a),
b => Ok(b)
)
.map(_.withHeaders(headers: _*))
}
implicit final class OptionResponse[A](o: Option[A]) {
def toResponse(headers: Header*)
(implicit F: Applicative[F]
, w0: EntityEncoder[F, A]): F[Response[F]] =
def toResponse(
headers: Header*
)(implicit F: Applicative[F], w0: EntityEncoder[F, A]): F[Response[F]] =
o.map(a => Ok(a)).getOrElse(NotFound()).map(_.withHeaders(headers: _*))
}
}
object ResponseGenerator {
}
object ResponseGenerator {}
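
The two extension classes above give routes a uniform way to turn an Either or an Option into a response. A small hedged sketch of a call site, assuming it lives inside a routes object that mixes in Http4sDsl[F] and ResponseGenerator[F] and has an EntityEncoder for the payload in scope; the wrapper method name is illustrative only:

// Hypothetical call site; findItem and mkItemDetail exist in this codebase,
// but this particular wrapper method is only for illustration.
def findItemRoute(backend: BackendApp[F], user: AuthToken, id: Ident): F[Response[F]] =
  for {
    item <- backend.item.findItem(id, user.account.collective)   // F[Option[...]]
    resp <- item.map(Conversions.mkItemDetail).toResponse()      // Some => Ok(...), None => NotFound()
  } yield resp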

View File

@ -18,17 +18,18 @@ import org.http4s.headers.ETag.EntityTag
object AttachmentRoutes {
def apply[F[_]: Effect](backend: BackendApp[F], user: AuthToken): HttpRoutes[F] = {
val dsl = new Http4sDsl[F]{}
val dsl = new Http4sDsl[F] {}
import dsl._
def makeByteResp(data: OItem.AttachmentData[F]): F[Response[F]] = {
val mt = MediaType.unsafeParse(data.meta.mimetype.asString)
val mt = MediaType.unsafeParse(data.meta.mimetype.asString)
val cntLen: Header = `Content-Length`.unsafeFromLong(data.meta.length)
val eTag: Header = ETag(data.meta.checksum)
val disp: Header = `Content-Disposition`("inline", Map("filename" -> data.ra.name.getOrElse("")))
Ok(data.data.take(data.meta.length)).
map(r => r.withContentType(`Content-Type`(mt)).
withHeaders(cntLen, eTag, disp))
val eTag: Header = ETag(data.meta.checksum)
val disp: Header =
`Content-Disposition`("inline", Map("filename" -> data.ra.name.getOrElse("")))
Ok(data.data.take(data.meta.length)).map(r =>
r.withContentType(`Content-Type`(mt)).withHeaders(cntLen, eTag, disp)
)
}
HttpRoutes.of {
@ -37,21 +38,24 @@ object AttachmentRoutes {
fileData <- backend.item.findAttachment(id, user.account.collective)
inm = req.headers.get(`If-None-Match`).flatMap(_.tags)
matches = matchETag(fileData, inm)
resp <- if (matches) NotModified()
else fileData.map(makeByteResp).getOrElse(NotFound(BasicResult(false, "Not found")))
resp <- if (matches) NotModified()
else
fileData.map(makeByteResp).getOrElse(NotFound(BasicResult(false, "Not found")))
} yield resp
case GET -> Root / Ident(id) / "meta" =>
for {
rm <- backend.item.findAttachmentMeta(id, user.account.collective)
rm <- backend.item.findAttachmentMeta(id, user.account.collective)
md = rm.map(Conversions.mkAttachmentMeta)
resp <- md.map(Ok(_)).getOrElse(NotFound(BasicResult(false, "Not found.")))
} yield resp
}
}
private def matchETag[F[_]]( fileData: Option[OItem.AttachmentData[F]]
, noneMatch: Option[NonEmptyList[EntityTag]]): Boolean =
private def matchETag[F[_]](
fileData: Option[OItem.AttachmentData[F]],
noneMatch: Option[NonEmptyList[EntityTag]]
): Boolean =
(fileData, noneMatch) match {
case (Some(fd), Some(nm)) =>
fd.meta.checksum == nm.head.tag

View File

@ -12,14 +12,17 @@ import org.http4s.server._
object Authenticate {
def authenticateRequest[F[_]: Effect](auth: String => F[Login.Result])(req: Request[F]): F[Login.Result] =
def authenticateRequest[F[_]: Effect](
auth: String => F[Login.Result]
)(req: Request[F]): F[Login.Result] =
CookieData.authenticator(req) match {
case Right(str) => auth(str)
case Left(_) => Login.Result.invalidAuth.pure[F]
case Left(_) => Login.Result.invalidAuth.pure[F]
}
def of[F[_]: Effect](S: Login[F], cfg: Login.Config)(pf: PartialFunction[AuthedRequest[F, AuthToken], F[Response[F]]]): HttpRoutes[F] = {
def of[F[_]: Effect](S: Login[F], cfg: Login.Config)(
pf: PartialFunction[AuthedRequest[F, AuthToken], F[Response[F]]]
): HttpRoutes[F] = {
val dsl: Http4sDsl[F] = new Http4sDsl[F] {}
import dsl._
@ -34,7 +37,9 @@ object Authenticate {
middleware(AuthedRoutes.of(pf))
}
def apply[F[_]: Effect](S: Login[F], cfg: Login.Config)(f: AuthToken => HttpRoutes[F]): HttpRoutes[F] = {
def apply[F[_]: Effect](S: Login[F], cfg: Login.Config)(
f: AuthToken => HttpRoutes[F]
): HttpRoutes[F] = {
val dsl: Http4sDsl[F] = new Http4sDsl[F] {}
import dsl._
@ -49,6 +54,8 @@ object Authenticate {
middleware(AuthedRoutes(authReq => f(authReq.context).run(authReq.req)))
}
private def getUser[F[_]: Effect](auth: String => F[Login.Result]): Kleisli[F, Request[F], Either[String, AuthToken]] =
private def getUser[F[_]: Effect](
auth: String => F[Login.Result]
): Kleisli[F, Request[F], Either[String, AuthToken]] =
Kleisli(r => authenticateRequest(auth)(r).map(_.toEither))
}
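
Authenticate.of and Authenticate.apply wrap routes behind the cookie-based login check. A minimal wiring sketch against the signatures above, assuming a Login service, its config and a BackendApp are already constructed; the actual wiring in the server module is not part of this excerpt:

// Hedged composition example; securedItemRoutes is an illustrative name, not taken from the diff.
def securedItemRoutes[F[_]: Effect](
    login: Login[F],
    cfg: Login.Config,
    backend: BackendApp[F]
): HttpRoutes[F] =
  Authenticate(login, cfg) { token =>
    ItemRoutes(backend, token)   // any AuthToken => HttpRoutes[F] can be plugged in here
  }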

View File

@ -25,25 +25,25 @@ object CollectiveRoutes {
resp <- Ok(Conversions.mkItemInsights(ins))
} yield resp
case req@POST -> Root / "settings" =>
case req @ POST -> Root / "settings" =>
for {
settings <- req.as[CollectiveSettings]
res <- backend.collective.updateLanguage(user.account.collective, settings.language)
resp <- Ok(Conversions.basicResult(res, "Language updated."))
settings <- req.as[CollectiveSettings]
res <- backend.collective.updateLanguage(user.account.collective, settings.language)
resp <- Ok(Conversions.basicResult(res, "Language updated."))
} yield resp
case GET -> Root / "settings" =>
for {
collDb <- backend.collective.find(user.account.collective)
sett = collDb.map(c => CollectiveSettings(c.language))
resp <- sett.toResponse()
collDb <- backend.collective.find(user.account.collective)
sett = collDb.map(c => CollectiveSettings(c.language))
resp <- sett.toResponse()
} yield resp
case GET -> Root =>
for {
collDb <- backend.collective.find(user.account.collective)
coll = collDb.map(c => Collective(c.id, c.state, c.created))
resp <- coll.toResponse()
collDb <- backend.collective.find(user.account.collective)
coll = collDb.map(c => Collective(c.id, c.state, c.created))
resp <- coll.toResponse()
} yield resp
}
}

View File

@ -15,7 +15,7 @@ import org.http4s.dsl.Http4sDsl
object EquipmentRoutes {
def apply[F[_]: Effect](backend: BackendApp[F], user: AuthToken): HttpRoutes[F] = {
val dsl = new Http4sDsl[F]{}
val dsl = new Http4sDsl[F] {}
import dsl._
HttpRoutes.of {
@ -36,12 +36,12 @@ object EquipmentRoutes {
case req @ PUT -> Root =>
for {
data <- req.as[Equipment]
equip = changeEquipment(data, user.account.collective)
equip = changeEquipment(data, user.account.collective)
res <- backend.equipment.update(equip)
resp <- Ok(basicResult(res, "Equipment updated."))
} yield resp
case DELETE -> Root / Ident(id) =>
case DELETE -> Root / Ident(id) =>
for {
del <- backend.equipment.delete(id, user.account.collective)
resp <- Ok(basicResult(del, "Equipment deleted."))

View File

@ -10,15 +10,19 @@ import org.http4s.dsl.Http4sDsl
object InfoRoutes {
def apply[F[_]: Sync](): HttpRoutes[F] = {
val dsl = new Http4sDsl[F]{}
val dsl = new Http4sDsl[F] {}
import dsl._
HttpRoutes.of[F] {
case GET -> (Root / "version") =>
Ok(VersionInfo(BuildInfo.version
, BuildInfo.builtAtMillis
, BuildInfo.builtAtString
, BuildInfo.gitHeadCommit.getOrElse("")
, BuildInfo.gitDescribedVersion.getOrElse("")))
Ok(
VersionInfo(
BuildInfo.version,
BuildInfo.builtAtMillis,
BuildInfo.builtAtString,
BuildInfo.gitHeadCommit.getOrElse(""),
BuildInfo.gitDescribedVersion.getOrElse("")
)
)
}
}
}

View File

@ -18,24 +18,24 @@ object ItemRoutes {
private[this] val logger = getLogger
def apply[F[_]: Effect](backend: BackendApp[F], user: AuthToken): HttpRoutes[F] = {
val dsl = new Http4sDsl[F]{}
val dsl = new Http4sDsl[F] {}
import dsl._
HttpRoutes.of {
case req @ POST -> Root / "search" =>
for {
mask <- req.as[ItemSearch]
_ <- logger.ftrace(s"Got search mask: $mask")
query = Conversions.mkQuery(mask, user.account.collective)
_ <- logger.ftrace(s"Running query: $query")
items <- backend.item.findItems(query, 100)
resp <- Ok(Conversions.mkItemList(items))
mask <- req.as[ItemSearch]
_ <- logger.ftrace(s"Got search mask: $mask")
query = Conversions.mkQuery(mask, user.account.collective)
_ <- logger.ftrace(s"Running query: $query")
items <- backend.item.findItems(query, 100)
resp <- Ok(Conversions.mkItemList(items))
} yield resp
case GET -> Root / Ident(id) =>
for {
item <- backend.item.findItem(id, user.account.collective)
result = item.map(Conversions.mkItemDetail)
result = item.map(Conversions.mkItemDetail)
resp <- result.map(r => Ok(r)).getOrElse(NotFound(BasicResult(false, "Not found.")))
} yield resp
@ -51,89 +51,89 @@ object ItemRoutes {
resp <- Ok(Conversions.basicResult(res, "Item back to created."))
} yield resp
case req@POST -> Root / Ident(id) / "tags" =>
case req @ POST -> Root / Ident(id) / "tags" =>
for {
tags <- req.as[ReferenceList].map(_.items)
res <- backend.item.setTags(id, tags.map(_.id), user.account.collective)
resp <- Ok(Conversions.basicResult(res, "Tags updated"))
tags <- req.as[ReferenceList].map(_.items)
res <- backend.item.setTags(id, tags.map(_.id), user.account.collective)
resp <- Ok(Conversions.basicResult(res, "Tags updated"))
} yield resp
case req@POST -> Root / Ident(id) / "direction" =>
case req @ POST -> Root / Ident(id) / "direction" =>
for {
dir <- req.as[DirectionValue]
res <- backend.item.setDirection(id, dir.direction, user.account.collective)
resp <- Ok(Conversions.basicResult(res, "Direction updated"))
} yield resp
case req@POST -> Root / Ident(id) / "corrOrg" =>
case req @ POST -> Root / Ident(id) / "corrOrg" =>
for {
idref <- req.as[OptionalId]
res <- backend.item.setCorrOrg(id, idref.id, user.account.collective)
resp <- Ok(Conversions.basicResult(res, "Correspondent organization updated"))
idref <- req.as[OptionalId]
res <- backend.item.setCorrOrg(id, idref.id, user.account.collective)
resp <- Ok(Conversions.basicResult(res, "Correspondent organization updated"))
} yield resp
case req@POST -> Root / Ident(id) / "corrPerson" =>
case req @ POST -> Root / Ident(id) / "corrPerson" =>
for {
idref <- req.as[OptionalId]
res <- backend.item.setCorrPerson(id, idref.id, user.account.collective)
resp <- Ok(Conversions.basicResult(res, "Correspondent person updated"))
idref <- req.as[OptionalId]
res <- backend.item.setCorrPerson(id, idref.id, user.account.collective)
resp <- Ok(Conversions.basicResult(res, "Correspondent person updated"))
} yield resp
case req@POST -> Root / Ident(id) / "concPerson" =>
case req @ POST -> Root / Ident(id) / "concPerson" =>
for {
idref <- req.as[OptionalId]
res <- backend.item.setConcPerson(id, idref.id, user.account.collective)
resp <- Ok(Conversions.basicResult(res, "Concerned person updated"))
idref <- req.as[OptionalId]
res <- backend.item.setConcPerson(id, idref.id, user.account.collective)
resp <- Ok(Conversions.basicResult(res, "Concerned person updated"))
} yield resp
case req@POST -> Root / Ident(id) / "concEquipment" =>
case req @ POST -> Root / Ident(id) / "concEquipment" =>
for {
idref <- req.as[OptionalId]
res <- backend.item.setConcEquip(id, idref.id, user.account.collective)
resp <- Ok(Conversions.basicResult(res, "Concerned equipment updated"))
} yield resp
case req@POST -> Root / Ident(id) / "notes" =>
for {
text <- req.as[OptionalText]
res <- backend.item.setNotes(id, text.text, user.account.collective)
idref <- req.as[OptionalId]
res <- backend.item.setConcEquip(id, idref.id, user.account.collective)
resp <- Ok(Conversions.basicResult(res, "Concerned equipment updated"))
} yield resp
case req@POST -> Root / Ident(id) / "name" =>
case req @ POST -> Root / Ident(id) / "notes" =>
for {
text <- req.as[OptionalText]
res <- backend.item.setName(id, text.text.getOrElse(""), user.account.collective)
resp <- Ok(Conversions.basicResult(res, "Concerned equipment updated"))
text <- req.as[OptionalText]
res <- backend.item.setNotes(id, text.text, user.account.collective)
resp <- Ok(Conversions.basicResult(res, "Concerned equipment updated"))
} yield resp
case req@POST -> Root / Ident(id) / "duedate" =>
case req @ POST -> Root / Ident(id) / "name" =>
for {
date <- req.as[OptionalDate]
_ <- logger.fdebug(s"Setting item due date to ${date.date}")
res <- backend.item.setItemDueDate(id, date.date, user.account.collective)
resp <- Ok(Conversions.basicResult(res, "Item due date updated"))
text <- req.as[OptionalText]
res <- backend.item.setName(id, text.text.getOrElse(""), user.account.collective)
resp <- Ok(Conversions.basicResult(res, "Concerned equipment updated"))
} yield resp
case req@POST -> Root / Ident(id) / "date" =>
case req @ POST -> Root / Ident(id) / "duedate" =>
for {
date <- req.as[OptionalDate]
_ <- logger.fdebug(s"Setting item date to ${date.date}")
res <- backend.item.setItemDate(id, date.date, user.account.collective)
resp <- Ok(Conversions.basicResult(res, "Item date updated"))
date <- req.as[OptionalDate]
_ <- logger.fdebug(s"Setting item due date to ${date.date}")
res <- backend.item.setItemDueDate(id, date.date, user.account.collective)
resp <- Ok(Conversions.basicResult(res, "Item due date updated"))
} yield resp
case req @ POST -> Root / Ident(id) / "date" =>
for {
date <- req.as[OptionalDate]
_ <- logger.fdebug(s"Setting item date to ${date.date}")
res <- backend.item.setItemDate(id, date.date, user.account.collective)
resp <- Ok(Conversions.basicResult(res, "Item date updated"))
} yield resp
case GET -> Root / Ident(id) / "proposals" =>
for {
ml <- backend.item.getProposals(id, user.account.collective)
ip = Conversions.mkItemProposals(ml)
ip = Conversions.mkItemProposals(ml)
resp <- Ok(ip)
} yield resp
case DELETE -> Root / Ident(id) =>
for {
n <- backend.item.delete(id, user.account.collective)
res = BasicResult(n > 0, if (n > 0) "Item deleted" else "Item deletion failed.")
res = BasicResult(n > 0, if (n > 0) "Item deleted" else "Item deletion failed.")
resp <- Ok(res)
} yield resp
}

View File

@ -19,15 +19,15 @@ object JobQueueRoutes {
HttpRoutes.of {
case GET -> Root / "state" =>
for {
js <- backend.job.queueState(user.account.collective, 200)
js <- backend.job.queueState(user.account.collective, 200)
res = Conversions.mkJobQueueState(js)
resp <- Ok(res)
} yield resp
case POST -> Root / Ident(id) / "cancel" =>
for {
result <- backend.job.cancelJob(id, user.account.collective)
resp <- Ok(Conversions.basicResult(result))
result <- backend.job.cancelJob(id, user.account.collective)
resp <- Ok(Conversions.basicResult(result))
} yield resp
}
}

View File

@ -18,10 +18,10 @@ object LoginRoutes {
import dsl._
HttpRoutes.of[F] {
case req@POST -> Root / "login" =>
case req @ POST -> Root / "login" =>
for {
up <- req.as[UserPass]
res <- S.loginUserPass(cfg.auth)(Login.UserPass(up.account, up.password))
up <- req.as[UserPass]
res <- S.loginUserPass(cfg.auth)(Login.UserPass(up.account, up.password))
resp <- makeResponse(dsl, cfg, res, up.account)
} yield resp
}
@ -33,22 +33,36 @@ object LoginRoutes {
HttpRoutes.of[F] {
case req @ POST -> Root / "session" =>
Authenticate.authenticateRequest(S.loginSession(cfg.auth))(req).
flatMap(res => makeResponse(dsl, cfg, res, ""))
Authenticate
.authenticateRequest(S.loginSession(cfg.auth))(req)
.flatMap(res => makeResponse(dsl, cfg, res, ""))
case POST -> Root / "logout" =>
Ok().map(_.addCookie(CookieData.deleteCookie(cfg)))
}
}
def makeResponse[F[_]: Effect](dsl: Http4sDsl[F], cfg: Config, res: Login.Result, account: String): F[Response[F]] = {
def makeResponse[F[_]: Effect](
dsl: Http4sDsl[F],
cfg: Config,
res: Login.Result,
account: String
): F[Response[F]] = {
import dsl._
res match {
case Login.Result.Ok(token) =>
for {
cd <- AuthToken.user(token.account, cfg.auth.serverSecret).map(CookieData.apply)
resp <- Ok(AuthResult(token.account.collective.id, token.account.user.id, true, "Login successful", Some(cd.asString), cfg.auth.sessionValid.millis)).
map(_.addCookie(cd.asCookie(cfg)))
resp <- Ok(
AuthResult(
token.account.collective.id,
token.account.user.id,
true,
"Login successful",
Some(cd.asString),
cfg.auth.sessionValid.millis
)
).map(_.addCookie(cd.asCookie(cfg)))
} yield resp
case _ =>
Ok(AuthResult("", account, false, "Login failed.", None, 0L))

View File

@ -16,15 +16,15 @@ import org.http4s.dsl.Http4sDsl
object OrganizationRoutes {
def apply[F[_]: Effect](backend: BackendApp[F], user: AuthToken): HttpRoutes[F] = {
val dsl = new Http4sDsl[F]{}
val dsl = new Http4sDsl[F] {}
import dsl._
HttpRoutes.of {
case GET -> Root :? FullQueryParamMatcher(full) =>
if (full.getOrElse(false)) {
for {
data <- backend.organization.findAllOrg(user.account)
resp <- Ok(OrganizationList(data.map(mkOrg).toList))
data <- backend.organization.findAllOrg(user.account)
resp <- Ok(OrganizationList(data.map(mkOrg).toList))
} yield resp
} else {
for {
@ -38,7 +38,7 @@ object OrganizationRoutes {
data <- req.as[Organization]
newOrg <- newOrg(data, user.account.collective)
added <- backend.organization.addOrg(newOrg)
resp <- Ok(basicResult(added, "New organization saved."))
resp <- Ok(basicResult(added, "New organization saved."))
} yield resp
case req @ PUT -> Root =>
@ -49,10 +49,10 @@ object OrganizationRoutes {
resp <- Ok(basicResult(update, "Organization updated."))
} yield resp
case DELETE -> Root / Ident(id) =>
case DELETE -> Root / Ident(id) =>
for {
delOrg <- backend.organization.deleteOrg(id, user.account.collective)
resp <- Ok(basicResult(delOrg, "Organization deleted."))
delOrg <- backend.organization.deleteOrg(id, user.account.collective)
resp <- Ok(basicResult(delOrg, "Organization deleted."))
} yield resp
}
}

View File

@ -6,9 +6,10 @@ import org.http4s.dsl.impl.OptionalQueryParamDecoderMatcher
object ParamDecoder {
implicit val booleanDecoder: QueryParamDecoder[Boolean] =
QueryParamDecoder.fromUnsafeCast(qp => Option(qp.value).exists(_ equalsIgnoreCase "true"))("Boolean")
QueryParamDecoder.fromUnsafeCast(qp => Option(qp.value).exists(_.equalsIgnoreCase("true")))(
"Boolean"
)
object FullQueryParamMatcher extends OptionalQueryParamDecoderMatcher[Boolean]("full")
}

View File

@ -19,15 +19,15 @@ object PersonRoutes {
private[this] val logger = getLogger
def apply[F[_]: Effect](backend: BackendApp[F], user: AuthToken): HttpRoutes[F] = {
val dsl = new Http4sDsl[F]{}
val dsl = new Http4sDsl[F] {}
import dsl._
HttpRoutes.of {
case GET -> Root :? FullQueryParamMatcher(full) =>
case GET -> Root :? FullQueryParamMatcher(full) =>
if (full.getOrElse(false)) {
for {
data <- backend.organization.findAllPerson(user.account)
resp <- Ok(PersonList(data.map(mkPerson).toList))
data <- backend.organization.findAllPerson(user.account)
resp <- Ok(PersonList(data.map(mkPerson).toList))
} yield resp
} else {
for {
@ -41,7 +41,7 @@ object PersonRoutes {
data <- req.as[Person]
newPer <- newPerson(data, user.account.collective)
added <- backend.organization.addPerson(newPer)
resp <- Ok(basicResult(added, "New person saved."))
resp <- Ok(basicResult(added, "New person saved."))
} yield resp
case req @ PUT -> Root =>
@ -52,11 +52,11 @@ object PersonRoutes {
resp <- Ok(basicResult(update, "Person updated."))
} yield resp
case DELETE -> Root / Ident(id) =>
case DELETE -> Root / Ident(id) =>
for {
_ <- logger.fdebug(s"Deleting person ${id.id}")
delOrg <- backend.organization.deletePerson(id, user.account.collective)
resp <- Ok(basicResult(delOrg, "Person deleted."))
_ <- logger.fdebug(s"Deleting person ${id.id}")
delOrg <- backend.organization.deletePerson(id, user.account.collective)
resp <- Ok(basicResult(delOrg, "Person deleted."))
} yield resp
}
}

View File

@ -24,16 +24,16 @@ object RegisterRoutes {
HttpRoutes.of {
case req @ POST -> Root / "register" =>
for {
data <- req.as[Registration]
res <- backend.signup.register(cfg.backend.signup)(convert(data))
resp <- Ok(convert(res))
data <- req.as[Registration]
res <- backend.signup.register(cfg.backend.signup)(convert(data))
resp <- Ok(convert(res))
} yield resp
case req@ POST -> Root / "newinvite" =>
case req @ POST -> Root / "newinvite" =>
for {
data <- req.as[GenInvite]
res <- backend.signup.newInvite(cfg.backend.signup)(data.password)
resp <- Ok(convert(res))
data <- req.as[GenInvite]
res <- backend.signup.newInvite(cfg.backend.signup)(data.password)
resp <- Ok(convert(res))
} yield resp
}
}
@ -47,7 +47,6 @@ object RegisterRoutes {
InviteResult(false, "Password is invalid.", None)
}
def convert(r: SignupResult): BasicResult = r match {
case SignupResult.CollectiveExists =>
BasicResult(false, "A collective with this name already exists.")
@ -62,7 +61,6 @@ object RegisterRoutes {
BasicResult(true, "Signup successful")
}
def convert(r: Registration): RegisterData =
RegisterData(r.collectiveName, r.login, r.password, r.invite)
}

View File

@ -22,8 +22,8 @@ object SourceRoutes {
HttpRoutes.of {
case GET -> Root =>
for {
all <- backend.source.findAll(user.account)
res <- Ok(SourceList(all.map(mkSource).toList))
all <- backend.source.findAll(user.account)
res <- Ok(SourceList(all.map(mkSource).toList))
} yield res
case req @ POST -> Root =>
@ -37,12 +37,12 @@ object SourceRoutes {
case req @ PUT -> Root =>
for {
data <- req.as[Source]
src = changeSource(data, user.account.collective)
src = changeSource(data, user.account.collective)
updated <- backend.source.update(src)
resp <- Ok(basicResult(updated, "Source updated."))
} yield resp
case DELETE -> Root / Ident(id) =>
case DELETE -> Root / Ident(id) =>
for {
del <- backend.source.delete(id, user.account.collective)
resp <- Ok(basicResult(del, "Source deleted."))

View File

@ -28,21 +28,21 @@ object TagRoutes {
case req @ POST -> Root =>
for {
data <- req.as[Tag]
tag <- newTag(data, user.account.collective)
res <- backend.tag.add(tag)
resp <- Ok(basicResult(res, "Tag successfully created."))
data <- req.as[Tag]
tag <- newTag(data, user.account.collective)
res <- backend.tag.add(tag)
resp <- Ok(basicResult(res, "Tag successfully created."))
} yield resp
case req @ PUT -> Root =>
for {
data <- req.as[Tag]
tag = changeTag(data, user.account.collective)
res <- backend.tag.update(tag)
resp <- Ok(basicResult(res, "Tag successfully updated."))
data <- req.as[Tag]
tag = changeTag(data, user.account.collective)
res <- backend.tag.update(tag)
resp <- Ok(basicResult(res, "Tag successfully updated."))
} yield resp
case DELETE -> Root / Ident(id) =>
case DELETE -> Root / Ident(id) =>
for {
del <- backend.tag.delete(id, user.account.collective)
resp <- Ok(basicResult(del, "Tag successfully deleted."))

View File

@ -26,9 +26,14 @@ object UploadRoutes {
case req @ POST -> Root / "item" =>
for {
multipart <- req.as[Multipart[F]]
updata <- readMultipart(multipart, logger, Priority.High, cfg.backend.files.validMimeTypes)
result <- backend.upload.submit(updata, user.account)
res <- Ok(basicResult(result))
updata <- readMultipart(
multipart,
logger,
Priority.High,
cfg.backend.files.validMimeTypes
)
result <- backend.upload.submit(updata, user.account)
res <- Ok(basicResult(result))
} yield res
}
@ -39,12 +44,12 @@ object UploadRoutes {
import dsl._
HttpRoutes.of {
case req @ POST -> Root / "item" / Ident(id)=>
case req @ POST -> Root / "item" / Ident(id) =>
for {
multipart <- req.as[Multipart[F]]
updata <- readMultipart(multipart, logger, Priority.Low, cfg.backend.files.validMimeTypes)
result <- backend.upload.submit(updata, id)
res <- Ok(basicResult(result))
res <- Ok(basicResult(result))
} yield res
}
}

View File

@ -22,15 +22,19 @@ object UserRoutes {
HttpRoutes.of {
case req @ POST -> Root / "changePassword" =>
for {
data <- req.as[PasswordChange]
res <- backend.collective.changePassword(user.account, data.currentPassword, data.newPassword)
resp <- Ok(basicResult(res))
data <- req.as[PasswordChange]
res <- backend.collective.changePassword(
user.account,
data.currentPassword,
data.newPassword
)
resp <- Ok(basicResult(res))
} yield resp
case GET -> Root =>
for {
all <- backend.collective.listUser(user.account.collective)
res <- Ok(UserList(all.map(mkUser).toList))
all <- backend.collective.listUser(user.account.collective)
res <- Ok(UserList(all.map(mkUser).toList))
} yield res
case req @ POST -> Root =>
@ -51,7 +55,7 @@ object UserRoutes {
case DELETE -> Root / Ident(id) =>
for {
ar <- backend.collective.deleteUser(id, user.account.collective)
ar <- backend.collective.deleteUser(id, user.account.collective)
resp <- Ok(basicResult(ar, "User deleted."))
} yield resp
}

View File

@ -8,14 +8,21 @@ import docspell.backend.signup.{Config => SignupConfig}
import yamusca.imports._
import yamusca.implicits._
case class Flags( appName: String
, baseUrl: LenientUri
, signupMode: SignupConfig.Mode
, docspellAssetPath: String)
case class Flags(
appName: String,
baseUrl: LenientUri,
signupMode: SignupConfig.Mode,
docspellAssetPath: String
)
object Flags {
def apply(cfg: Config): Flags =
Flags(cfg.appName, cfg.baseUrl, cfg.backend.signup.mode, s"assets/docspell-webapp/${BuildInfo.version}")
Flags(
cfg.appName,
cfg.baseUrl,
cfg.backend.signup.mode,
s"assets/docspell-webapp/${BuildInfo.version}"
)
implicit val jsonEncoder: Encoder[Flags] =
deriveEncoder[Flags]

View File

@ -21,90 +21,100 @@ object TemplateRoutes {
val `text/html` = new MediaType("text", "html")
def apply[F[_]: Effect](blocker: Blocker, cfg: Config)(implicit C: ContextShift[F]): HttpRoutes[F] = {
def apply[F[_]: Effect](blocker: Blocker, cfg: Config)(
implicit C: ContextShift[F]
): HttpRoutes[F] = {
val indexTemplate = memo(loadResource("/index.html").flatMap(loadTemplate(_, blocker)))
val docTemplate = memo(loadResource("/doc.html").flatMap(loadTemplate(_, blocker)))
val docTemplate = memo(loadResource("/doc.html").flatMap(loadTemplate(_, blocker)))
val dsl = new Http4sDsl[F]{}
val dsl = new Http4sDsl[F] {}
import dsl._
HttpRoutes.of[F] {
case GET -> Root / "index.html" =>
for {
templ <- indexTemplate
resp <- Ok(IndexData(cfg).render(templ), `Content-Type`(`text/html`))
templ <- indexTemplate
resp <- Ok(IndexData(cfg).render(templ), `Content-Type`(`text/html`))
} yield resp
case GET -> Root / "doc" =>
for {
templ <- docTemplate
resp <- Ok(DocData().render(templ), `Content-Type`(`text/html`))
templ <- docTemplate
resp <- Ok(DocData().render(templ), `Content-Type`(`text/html`))
} yield resp
}
}
def loadResource[F[_]: Sync](name: String): F[URL] = {
def loadResource[F[_]: Sync](name: String): F[URL] =
Option(getClass.getResource(name)) match {
case None =>
Sync[F].raiseError(new Exception("Unknown resource: "+ name))
Sync[F].raiseError(new Exception("Unknown resource: " + name))
case Some(r) =>
r.pure[F]
}
}
def loadUrl[F[_]: Sync](url: URL, blocker: Blocker)(implicit C: ContextShift[F]): F[String] =
Stream.bracket(Sync[F].delay(url.openStream))(in => Sync[F].delay(in.close())).
flatMap(in => io.readInputStream(in.pure[F], 64 * 1024, blocker, false)).
through(text.utf8Decode).
compile.fold("")(_ + _)
Stream
.bracket(Sync[F].delay(url.openStream))(in => Sync[F].delay(in.close()))
.flatMap(in => io.readInputStream(in.pure[F], 64 * 1024, blocker, false))
.through(text.utf8Decode)
.compile
.fold("")(_ + _)
def parseTemplate[F[_]: Sync](str: String): F[Template] =
Sync[F].delay {
mustache.parse(str) match {
case Right(t) => t
case Right(t) => t
case Left((_, err)) => sys.error(err)
}
}
def loadTemplate[F[_]: Sync](url: URL, blocker: Blocker)(implicit C: ContextShift[F]): F[Template] = {
loadUrl[F](url, blocker).flatMap(s => parseTemplate(s)).
map(t => {
logger.info(s"Compiled template $url")
t
})
}
def loadTemplate[F[_]: Sync](url: URL, blocker: Blocker)(
implicit C: ContextShift[F]
): F[Template] =
loadUrl[F](url, blocker).flatMap(s => parseTemplate(s)).map { t =>
logger.info(s"Compiled template $url")
t
}
case class DocData(swaggerRoot: String, openapiSpec: String)
object DocData {
def apply(): DocData =
DocData("/app/assets" + Webjars.swaggerui, s"/app/assets/${BuildInfo.name}/${BuildInfo.version}/docspell-openapi.yml")
DocData(
"/app/assets" + Webjars.swaggerui,
s"/app/assets/${BuildInfo.name}/${BuildInfo.version}/docspell-openapi.yml"
)
implicit def yamuscaValueConverter: ValueConverter[DocData] =
ValueConverter.deriveConverter[DocData]
}
case class IndexData(flags: Flags
, cssUrls: Seq[String]
, jsUrls: Seq[String]
, faviconBase: String
, appExtraJs: String
, flagsJson: String)
case class IndexData(
flags: Flags,
cssUrls: Seq[String],
jsUrls: Seq[String],
faviconBase: String,
appExtraJs: String,
flagsJson: String
)
object IndexData {
def apply(cfg: Config): IndexData =
IndexData(Flags(cfg)
, Seq(
IndexData(
Flags(cfg),
Seq(
"/app/assets" + Webjars.semanticui + "/semantic.min.css",
s"/app/assets/docspell-webapp/${BuildInfo.version}/docspell.css"
)
, Seq(
),
Seq(
"/app/assets" + Webjars.jquery + "/jquery.min.js",
"/app/assets" + Webjars.semanticui + "/semantic.min.js",
s"/app/assets/docspell-webapp/${BuildInfo.version}/docspell-app.js"
)
, s"/app/assets/docspell-webapp/${BuildInfo.version}/favicon"
, s"/app/assets/docspell-webapp/${BuildInfo.version}/docspell.js"
, Flags(cfg).asJson.spaces2 )
),
s"/app/assets/docspell-webapp/${BuildInfo.version}/favicon",
s"/app/assets/docspell-webapp/${BuildInfo.version}/docspell.js",
Flags(cfg).asJson.spaces2
)
implicit def yamuscaValueConverter: ValueConverter[IndexData] =
ValueConverter.deriveConverter[IndexData]
@ -116,10 +126,10 @@ object TemplateRoutes {
Option(ref.get) match {
case Some(a) => a.pure[F]
case None =>
fa.map(a => {
fa.map { a =>
ref.set(a)
a
})
}
}
}
}
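
The match at the bottom of this file is the tail of a small caching helper whose head falls outside the hunk. A hedged reconstruction of the whole helper, assuming the ref seen above is a java.util.concurrent.atomic.AtomicReference and that the lookup is deferred per call:

// Reconstruction for illustration only; everything except the Option(ref.get) match
// shown above is inferred, not taken from this diff.
import java.util.concurrent.atomic.AtomicReference
import cats.effect.Sync
import cats.implicits._

private def memo[F[_]: Sync, A](fa: F[A]): F[A] = {
  val ref = new AtomicReference[A]()
  Sync[F].suspend {
    Option(ref.get) match {
      case Some(a) => a.pure[F]   // later calls reuse the cached value
      case None =>
        fa.map { a =>             // first call computes, stores, then returns the value
          ref.set(a)
          a
        }
    }
  }
}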

View File

@ -9,7 +9,7 @@ import org.http4s.server.staticcontent.WebjarService.{WebjarAsset, Config => Web
object WebjarRoutes {
def appRoutes[F[_]: Effect](blocker: Blocker)(implicit C: ContextShift[F]): HttpRoutes[F] = {
def appRoutes[F[_]: Effect](blocker: Blocker)(implicit C: ContextShift[F]): HttpRoutes[F] =
webjarService(
WebjarConfig(
filter = assetFilter,
@ -17,10 +17,23 @@ object WebjarRoutes {
cacheStrategy = NoopCacheStrategy[F]
)
)
}
def assetFilter(asset: WebjarAsset): Boolean =
List(".js", ".css", ".html", ".json", ".jpg", ".png", ".eot", ".woff", ".woff2", ".svg", ".otf", ".ttf", ".yml", ".xml").
exists(e => asset.asset.endsWith(e))
List(
".js",
".css",
".html",
".json",
".jpg",
".png",
".eot",
".woff",
".woff2",
".svg",
".otf",
".ttf",
".yml",
".xml"
).exists(e => asset.asset.endsWith(e))
}

View File

@ -18,14 +18,14 @@ object AddResult {
e.fold(Failure, n => if (n > 0) Success else Failure(new Exception("No rows updated")))
case object Success extends AddResult {
def toEither = Right(())
def toEither = Right(())
val isSuccess = true
def fold[A](fa: Success.type => A, fb: EntityExists => A, fc: Failure => A): A =
fa(this)
}
case class EntityExists(msg: String) extends AddResult {
def toEither = Left(new Exception(msg))
def toEither = Left(new Exception(msg))
val isSuccess = false
def fold[A](fa: Success.type => A, fb: EntityExists => A, fc: Failure => A): A =
fb(this)
@ -35,7 +35,7 @@ object AddResult {
}
case class Failure(ex: Throwable) extends AddResult {
def toEither = Left(ex)
def toEither = Left(ex)
val isSuccess = false
def fold[A](fa: Success.type => A, fb: EntityExists => A, fc: Failure => A): A =
fc(this)

View File

@ -2,10 +2,7 @@ package docspell.store
import docspell.common.LenientUri
case class JdbcConfig(url: LenientUri
, user: String
, password: String
) {
case class JdbcConfig(url: LenientUri, user: String, password: String) {
val dbmsName: Option[String] =
JdbcConfig.extractDbmsName(url)

View File

@ -22,21 +22,25 @@ trait Store[F[_]] {
object Store {
def create[F[_]: Effect: ContextShift](jdbc: JdbcConfig
, connectEC: ExecutionContext
, blocker: Blocker): Resource[F, Store[F]] = {
def create[F[_]: Effect: ContextShift](
jdbc: JdbcConfig,
connectEC: ExecutionContext,
blocker: Blocker
): Resource[F, Store[F]] = {
val hxa = HikariTransactor.newHikariTransactor[F](jdbc.driverClass
, jdbc.url.asString
, jdbc.user
, jdbc.password
, connectEC
, blocker)
val hxa = HikariTransactor.newHikariTransactor[F](
jdbc.driverClass,
jdbc.url.asString,
jdbc.user,
jdbc.password,
connectEC,
blocker
)
for {
xa <- hxa
st = new StoreImpl[F](jdbc, xa)
_ <- Resource.liftF(st.migrate)
xa <- hxa
st = new StoreImpl[F](jdbc, xa)
_ <- Resource.liftF(st.migrate)
} yield st
}
}

View File

@ -24,7 +24,7 @@ case class Column(name: String, ns: String = "", alias: String = "") {
def is[A: Put](ov: Option[A]): Fragment = ov match {
case Some(v) => f ++ fr" = $v"
case None => fr"is null"
case None => fr"is null"
}
def is(c: Column): Fragment =
@ -42,7 +42,7 @@ case class Column(name: String, ns: String = "", alias: String = "") {
def isOrDiscard[A: Put](value: Option[A]): Fragment =
value match {
case Some(v) => is(v)
case None => Fragment.empty
case None => Fragment.empty
}
def isOneOf[A: Put](values: Seq[A]): Fragment = {

View File

@ -21,7 +21,9 @@ trait DoobieMeta {
})
def jsonMeta[A](implicit d: Decoder[A], e: Encoder[A]): Meta[A] =
Meta[String].imap(str => str.parseJsonAs[A].fold(ex => throw ex, identity))(a => e.apply(a).noSpaces)
Meta[String].imap(str => str.parseJsonAs[A].fold(ex => throw ex, identity))(a =>
e.apply(a).noSpaces
)
implicit val metaCollectiveState: Meta[CollectiveState] =
Meta[String].imap(CollectiveState.unsafe)(CollectiveState.asString)
@ -45,7 +47,9 @@ trait DoobieMeta {
Meta[String].imap(JobState.unsafe)(_.name)
implicit val metaDirection: Meta[Direction] =
Meta[Boolean].imap(flag => if (flag) Direction.Incoming: Direction else Direction.Outgoing: Direction)(d => Direction.isIncoming(d))
Meta[Boolean].imap(flag =>
if (flag) Direction.Incoming: Direction else Direction.Outgoing: Direction
)(d => Direction.isIncoming(d))
implicit val metaPriority: Meta[Priority] =
Meta[Int].imap(Priority.fromInt)(Priority.toInt)

View File

@ -19,7 +19,9 @@ trait DoobieSyntax {
commas(fa :: fas.toList)
def and(fs: Seq[Fragment]): Fragment =
Fragment.const(" (") ++ fs.filter(f => !isEmpty(f)).reduce(_ ++ Fragment.const(" AND ") ++ _) ++ Fragment.const(") ")
Fragment.const(" (") ++ fs
.filter(f => !isEmpty(f))
.reduce(_ ++ Fragment.const(" AND ") ++ _) ++ Fragment.const(") ")
def and(f0: Fragment, fs: Fragment*): Fragment =
and(f0 :: fs.toList)
@ -48,8 +50,9 @@ trait DoobieSyntax {
def insertRows(table: Fragment, cols: List[Column], vals: List[Fragment]): Fragment =
Fragment.const("INSERT INTO ") ++ table ++ Fragment.const(" (") ++
commas(cols.map(_.f)) ++ Fragment.const(") VALUES ") ++ commas(vals.map(f => sql"(" ++ f ++ sql")"))
commas(cols.map(_.f)) ++ Fragment.const(") VALUES ") ++ commas(
vals.map(f => sql"(" ++ f ++ sql")")
)
def selectSimple(cols: Seq[Column], table: Fragment, where: Fragment): Fragment =
selectSimple(commas(cols.map(_.f)), table, where)
@ -62,7 +65,6 @@ trait DoobieSyntax {
Fragment.const("SELECT DISTINCT(") ++ commas(cols.map(_.f)) ++
Fragment.const(") FROM ") ++ table ++ this.where(where)
// def selectJoinCollective(cols: Seq[Column], fkCid: Column, table: Fragment, wh: Fragment): Fragment =
// selectSimple(cols.map(_.prefix("a"))
// , table ++ fr"a," ++ RCollective.table ++ fr"b"
@ -70,11 +72,12 @@ trait DoobieSyntax {
// else and(wh, fkCid.prefix("a") is RCollective.Columns.id.prefix("b")))
def selectCount(col: Column, table: Fragment, where: Fragment): Fragment =
Fragment.const("SELECT COUNT(") ++ col.f ++ Fragment.const(") FROM ") ++ table ++ this.where(where)
Fragment.const("SELECT COUNT(") ++ col.f ++ Fragment.const(") FROM ") ++ table ++ this.where(
where
)
def deleteFrom(table: Fragment, where: Fragment): Fragment = {
def deleteFrom(table: Fragment, where: Fragment): Fragment =
fr"DELETE FROM" ++ table ++ this.where(where)
}
def withCTE(ps: (String, Fragment)*): Fragment = {
val subsel: Seq[Fragment] = ps.map(p => Fragment.const(p._1) ++ fr"AS (" ++ p._2 ++ fr")")

View File

@ -1,5 +1,3 @@
package docspell.store.impl
object Implicits extends DoobieMeta
with DoobieSyntax
object Implicits extends DoobieMeta with DoobieSyntax

View File

@ -10,7 +10,8 @@ import doobie._
import doobie.implicits._
final class StoreImpl[F[_]: Effect](jdbc: JdbcConfig, xa: Transactor[F]) extends Store[F] {
val bitpeaceCfg = BitpeaceConfig("filemeta", "filechunk", TikaMimetypeDetect, Ident.randomId[F].map(_.id))
val bitpeaceCfg =
BitpeaceConfig("filemeta", "filechunk", TikaMimetypeDetect, Ident.randomId[F].map(_.id))
def migrate: F[Int] =
FlywayMigrate.run[F](jdbc)
@ -24,14 +25,14 @@ final class StoreImpl[F[_]: Effect](jdbc: JdbcConfig, xa: Transactor[F]) extends
def bitpeace: Bitpeace[F] =
Bitpeace(bitpeaceCfg, xa)
def add(insert: ConnectionIO[Int], exists: ConnectionIO[Boolean]): F[AddResult] = {
def add(insert: ConnectionIO[Int], exists: ConnectionIO[Boolean]): F[AddResult] =
for {
save <- transact(insert).attempt
exist <- save.swap.traverse(ex => transact(exists).map(b => (ex, b)))
} yield exist.swap match {
case Right(_) => AddResult.Success
case Left((_, true)) => AddResult.EntityExists("Adding failed, because the entity already exists.")
case Left((_, true)) =>
AddResult.EntityExists("Adding failed, because the entity already exists.")
case Left((ex, _)) => AddResult.Failure(ex)
}
}
}
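
The add combinator above runs an insert and, only if it fails, consults an existence check to decide between EntityExists and Failure. A hedged usage sketch, assuming add is exposed on the Store trait; the RTag statements and field names used here are placeholders for whatever ConnectionIO operations the record module actually provides:

// Illustrative caller; RTag.insert, RTag.existsByName and tag.cid are assumed names.
def addTag[F[_]](store: Store[F], tag: RTag): F[AddResult] =
  store.add(
    RTag.insert(tag),                       // ConnectionIO[Int]: the attempted insert
    RTag.existsByName(tag.cid, tag.name)    // ConnectionIO[Boolean]: checked only when the insert fails
  )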

View File

@ -20,11 +20,12 @@ object FlywayMigrate {
}
logger.info(s"Using migration locations: $locations")
val fw = Flyway.configure().
cleanDisabled(true).
dataSource(jdbc.url.asString, jdbc.user, jdbc.password).
locations(locations: _*).
load()
val fw = Flyway
.configure()
.cleanDisabled(true)
.dataSource(jdbc.url.asString, jdbc.user, jdbc.password)
.locations(locations: _*)
.load()
fw.repair()
fw.migrate()

View File

@ -18,7 +18,7 @@ trait ONode[F[_]] {
object ONode {
private[this] val logger = getLogger
def apply[F[_] : Effect](store: Store[F]): Resource[F, ONode[F]] =
def apply[F[_]: Effect](store: Store[F]): Resource[F, ONode[F]] =
Resource.pure(new ONode[F] {
def register(appId: Ident, nodeType: NodeType, uri: LenientUri): F[Unit] =

View File

@ -12,58 +12,64 @@ import docspell.store.records.{RAttachment, RAttachmentMeta, RItem}
object QAttachment {
def deleteById[F[_]: Sync](store: Store[F])(attachId: Ident, coll: Ident): F[Int] = {
def deleteById[F[_]: Sync](store: Store[F])(attachId: Ident, coll: Ident): F[Int] =
for {
raOpt <- store.transact(RAttachment.findByIdAndCollective(attachId, coll))
n <- raOpt.traverse(_ => store.transact(RAttachment.delete(attachId)))
f <- Stream.emit(raOpt).
unNoneTerminate.
map(_.fileId.id).
flatMap(store.bitpeace.delete).
compile.last
f <- Stream
.emit(raOpt)
.unNoneTerminate
.map(_.fileId.id)
.flatMap(store.bitpeace.delete)
.compile
.last
} yield n.getOrElse(0) + f.map(_ => 1).getOrElse(0)
}
def deleteAttachment[F[_]: Sync](store: Store[F])(ra: RAttachment): F[Int] = {
def deleteAttachment[F[_]: Sync](store: Store[F])(ra: RAttachment): F[Int] =
for {
n <- store.transact(RAttachment.delete(ra.id))
f <- Stream.emit(ra.fileId.id).
flatMap(store.bitpeace.delete).
compile.last
n <- store.transact(RAttachment.delete(ra.id))
f <- Stream.emit(ra.fileId.id).flatMap(store.bitpeace.delete).compile.last
} yield n + f.map(_ => 1).getOrElse(0)
}
def deleteItemAttachments[F[_]: Sync](store: Store[F])(itemId: Ident, coll: Ident): F[Int] = {
def deleteItemAttachments[F[_]: Sync](store: Store[F])(itemId: Ident, coll: Ident): F[Int] =
for {
ras <- store.transact(RAttachment.findByItemAndCollective(itemId, coll))
ns <- ras.traverse(deleteAttachment[F](store))
} yield ns.sum
}
ras <- store.transact(RAttachment.findByItemAndCollective(itemId, coll))
ns <- ras.traverse(deleteAttachment[F](store))
} yield ns.sum
def getMetaProposals(itemId: Ident, coll: Ident): ConnectionIO[MetaProposalList] = {
val AC = RAttachment.Columns
val MC = RAttachmentMeta.Columns
val IC = RItem.Columns
val q = fr"SELECT" ++ MC.proposals.prefix("m").f ++ fr"FROM" ++ RAttachmentMeta.table ++ fr"m" ++
val q = fr"SELECT" ++ MC.proposals
.prefix("m")
.f ++ fr"FROM" ++ RAttachmentMeta.table ++ fr"m" ++
fr"INNER JOIN" ++ RAttachment.table ++ fr"a ON" ++ AC.id.prefix("a").is(MC.id.prefix("m")) ++
fr"INNER JOIN" ++ RItem.table ++ fr"i ON" ++ AC.itemId.prefix("a").is(IC.id.prefix("i")) ++
fr"WHERE" ++ and(AC.itemId.prefix("a").is(itemId), IC.cid.prefix("i").is(coll))
for {
ml <- q.query[MetaProposalList].to[Vector]
ml <- q.query[MetaProposalList].to[Vector]
} yield MetaProposalList.flatten(ml)
}
def getAttachmentMeta(attachId: Ident, collective: Ident): ConnectionIO[Option[RAttachmentMeta]] = {
def getAttachmentMeta(
attachId: Ident,
collective: Ident
): ConnectionIO[Option[RAttachmentMeta]] = {
val AC = RAttachment.Columns
val MC = RAttachmentMeta.Columns
val IC = RItem.Columns
val q = fr"SELECT" ++ commas(MC.all.map(_.prefix("m").f)) ++ fr"FROM" ++ RItem.table ++ fr"i" ++
fr"INNER JOIN" ++ RAttachment.table ++ fr"a ON" ++ IC.id.prefix("i").is(AC.itemId.prefix("a")) ++
fr"INNER JOIN" ++ RAttachmentMeta.table ++ fr"m ON" ++ AC.id.prefix("a").is(MC.id.prefix("m")) ++
fr"WHERE" ++ and(AC.id.prefix("a") is attachId, IC.cid.prefix("i") is collective)
fr"INNER JOIN" ++ RAttachment.table ++ fr"a ON" ++ IC.id
.prefix("i")
.is(AC.itemId.prefix("a")) ++
fr"INNER JOIN" ++ RAttachmentMeta.table ++ fr"m ON" ++ AC.id
.prefix("a")
.is(MC.id.prefix("m")) ++
fr"WHERE" ++ and(AC.id.prefix("a").is(attachId), IC.cid.prefix("i").is(collective))
q.query[RAttachmentMeta].option
}

View File

@ -8,27 +8,35 @@ import docspell.store.records.{RAttachment, RItem, RTag, RTagItem}
object QCollective {
case class InsightData( incoming: Int
, outgoing: Int
, bytes: Long
, tags: Map[String, Int])
case class InsightData(incoming: Int, outgoing: Int, bytes: Long, tags: Map[String, Int])
def getInsights(coll: Ident): ConnectionIO[InsightData] = {
val IC = RItem.Columns
val AC = RAttachment.Columns
val TC = RTag.Columns
val RC = RTagItem.Columns
val q0 = selectCount(IC.id, RItem.table, and(IC.cid is coll, IC.incoming is Direction.incoming)).
query[Int].unique
val q1 = selectCount(IC.id, RItem.table, and(IC.cid is coll, IC.incoming is Direction.outgoing)).
query[Int].unique
val q0 = selectCount(
IC.id,
RItem.table,
and(IC.cid.is(coll), IC.incoming.is(Direction.incoming))
).query[Int].unique
val q1 = selectCount(
IC.id,
RItem.table,
and(IC.cid.is(coll), IC.incoming.is(Direction.outgoing))
).query[Int].unique
val q2 = fr"SELECT sum(m.length) FROM" ++ RItem.table ++ fr"i" ++
fr"INNER JOIN" ++ RAttachment.table ++ fr"a ON" ++ AC.itemId.prefix("a").is(IC.id.prefix("i")) ++
fr"INNER JOIN" ++ RAttachment.table ++ fr"a ON" ++ AC.itemId
.prefix("a")
.is(IC.id.prefix("i")) ++
fr"INNER JOIN filemeta m ON m.id =" ++ AC.fileId.prefix("a").f ++
fr"WHERE" ++ IC.cid.is(coll)
val q3 = fr"SELECT" ++ commas(TC.name.prefix("t").f,fr"count(" ++ RC.itemId.prefix("r").f ++ fr")") ++
val q3 = fr"SELECT" ++ commas(
TC.name.prefix("t").f,
fr"count(" ++ RC.itemId.prefix("r").f ++ fr")"
) ++
fr"FROM" ++ RTagItem.table ++ fr"r" ++
fr"INNER JOIN" ++ RTag.table ++ fr"t ON" ++ RC.tagId.prefix("r").is(TC.tid.prefix("t")) ++
fr"WHERE" ++ TC.cid.prefix("t").is(coll) ++

Some files were not shown because too many files have changed in this diff.