Provide tasks with ability to return data and human message
To allow better communication from background tasks, a task can now return not only data (JSON) but also a human-readable message, which is sent via the notification channels.
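In practice a task pairs its JSON result with a one-line summary through a `JobTaskResultEncoder` instance. A minimal sketch of the pattern — `CleanupResult` and its fields are hypothetical; the encoder API is the one introduced by this commit:

    import docspell.joex.scheduler.JobTaskResultEncoder

    import io.circe.Encoder
    import io.circe.generic.semiauto.deriveEncoder

    // Hypothetical task result; any JSON-encodable type works the same way.
    final case class CleanupResult(removed: Int, freedBytes: Long)

    object CleanupResult {
      implicit val jsonEncoder: Encoder[CleanupResult] =
        deriveEncoder

      // fromJson keeps the structured data; withMessage adds the human
      // summary that is sent via the notification channels.
      implicit val jobTaskResultEncoder: JobTaskResultEncoder[CleanupResult] =
        JobTaskResultEncoder.fromJson[CleanupResult].withMessage { r =>
          s"Removed ${r.removed} files, freeing ${r.freedBytes} bytes."
        }
    }

The per-file changes follow.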
FileCopyTask.scala

@@ -9,14 +9,12 @@ package docspell.joex.filecopy
 import cats.data.NonEmptyList
 import cats.effect._
 import cats.implicits._

 import docspell.common.FileCopyTaskArgs.Selection
 import docspell.common.{FileCopyTaskArgs, Ident}
 import docspell.joex.Config
-import docspell.joex.scheduler.Task
+import docspell.joex.scheduler.{JobTaskResultEncoder, Task}
 import docspell.logging.Logger
 import docspell.store.file.{BinnyUtils, FileRepository, FileRepositoryConfig}

 import binny.CopyTool.Counter
 import binny.{BinaryId, BinaryStore, CopyTool}
 import io.circe.generic.semiauto.deriveCodec

@@ -56,6 +54,16 @@ object FileCopyTask {
       deriveCodec
     implicit val jsonCodec: Codec[CopyResult] =
       deriveCodec
+
+    implicit val jobTaskResultEncoder: JobTaskResultEncoder[CopyResult] =
+      JobTaskResultEncoder.fromJson[CopyResult].withMessage { result =>
+        val allGood = result.counter.map(_.success).sum
+        val failed = result.counter.map(_.failed.size).sum
+        if (result.success)
+          s"Successfully copied $allGood files to ${result.counter.size} stores."
+        else
+          s"Copying files failed for ${failed} files! ${allGood} were copied successfully."
+      }
   }

   def onCancel[F[_]]: Task[F, Args, Unit] =

@@ -91,7 +99,7 @@ object FileCopyTask {

       data match {
         case Right((from, tos)) =>
-          ctx.logger.info(s"Start copying all files from ") *>
+          ctx.logger.info(s"Start copying all files from $from") *>
             copy(ctx.logger, from, tos).flatTap(r =>
               if (r.success) ctx.logger.info(s"Copying finished: ${r.counter}")
               else ctx.logger.error(s"Copying failed: $r")
FileIntegrityCheckTask.scala

@@ -9,34 +9,49 @@ package docspell.joex.filecopy
 import cats.Monoid
 import cats.effect._
 import cats.implicits._

 import docspell.backend.ops.OFileRepository
 import docspell.backend.ops.OFileRepository.IntegrityResult
 import docspell.common.{FileIntegrityCheckArgs, FileKey}
-import docspell.joex.scheduler.Task
+import docspell.joex.scheduler.{JobTaskResultEncoder, Task}
 import docspell.store.records.RFileMeta

 import io.circe.Encoder
 import io.circe.generic.semiauto.deriveEncoder

 object FileIntegrityCheckTask {
   type Args = FileIntegrityCheckArgs

-  case class Result(ok: Int, failedKeys: Set[FileKey]) {
+  case class Result(ok: Int, failedKeys: Set[FileKey], notFoundKeys: Set[FileKey]) {
     override def toString: String =
-      s"Result(ok=$ok, failed=${failedKeys.size}, keysFailed=$failedKeys)"
+      s"Result(ok=$ok, failed=${failedKeys.size}, notFound=${notFoundKeys.size}, " +
+        s"keysFailed=$failedKeys, notFoundKeys=$notFoundKeys)"
   }
   object Result {
-    val empty = Result(0, Set.empty)
+    val empty = Result(0, Set.empty, Set.empty)
+
+    def notFound(key: FileKey) = Result(0, Set.empty, Set(key))

     def from(r: IntegrityResult): Result =
-      if (r.ok) Result(1, Set.empty) else Result(0, Set(r.key))
+      if (r.ok) Result(1, Set.empty, Set.empty) else Result(0, Set(r.key), Set.empty)

     implicit val monoid: Monoid[Result] =
-      Monoid.instance(empty, (a, b) => Result(a.ok + b.ok, a.failedKeys ++ b.failedKeys))
+      Monoid.instance(
+        empty,
+        (a, b) =>
+          Result(
+            a.ok + b.ok,
+            a.failedKeys ++ b.failedKeys,
+            a.notFoundKeys ++ b.notFoundKeys
+          )
+      )

     implicit val jsonEncoder: Encoder[Result] =
       deriveEncoder

+    implicit val jobTaskResultEncoder: JobTaskResultEncoder[Result] =
+      JobTaskResultEncoder.fromJson[Result].withMessage { result =>
+        s"Integrity check finished. Ok: ${result.ok}, " +
+          s"Failed: ${result.failedKeys.size}, Not found: ${result.notFoundKeys.size}"
+      }
   }

   def apply[F[_]: Sync](ops: OFileRepository[F]): Task[F, Args, Result] =

@@ -49,13 +64,16 @@ object FileIntegrityCheckTask {
         .chunks
         .evalTap(c => ctx.logger.info(s"Checking next ${c.size} files…"))
         .unchunks
-        .evalMap(meta => ops.checkIntegrity(meta.id, meta.checksum.some))
-        .evalMap {
+        .evalMap(meta =>
+          ops.checkIntegrity(meta.id, meta.checksum.some).flatMap {
            case Some(r) =>
              Result.from(r).pure[F]
            case None =>
-             ctx.logger.error(s"File not found").as(Result.empty)
-        }
+             ctx.logger
+               .error(s"File '${meta.id.toString}' not found in file repository")
+               .as(Result.notFound(meta.id))
+          }
+        )
         .foldMonoid
         .compile
         .lastOrError

@@ -67,5 +85,4 @@ object FileIntegrityCheckTask {

   def onCancel[F[_]]: Task[F, Args, Unit] =
     Task.log(_.warn(s"Cancelling ${FileIntegrityCheckArgs.taskName.id} task"))

 }
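Because the stream of per-file checks is folded with `.foldMonoid`, the `Monoid[Result]` instance above fully determines aggregation: the `ok` counts add and the key sets union. A REPL-style illustration with hypothetical counts:

    import cats.syntax.semigroup._
    import docspell.joex.filecopy.FileIntegrityCheckTask.Result

    val a = Result(ok = 3, failedKeys = Set.empty, notFoundKeys = Set.empty)
    val b = Result(ok = 2, failedKeys = Set.empty, notFoundKeys = Set.empty)

    // |+| is the combine from the Monoid instance above
    assert((a |+| b) == Result(5, Set.empty, Set.empty))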
ItemData.scala

@@ -8,6 +8,7 @@ package docspell.joex.process

 import docspell.common._
 import docspell.joex.process.ItemData.AttachmentDates
+import docspell.joex.scheduler.JobTaskResultEncoder
 import docspell.store.records.{RAttachment, RAttachmentMeta, RItem}

 import io.circe.syntax.EncoderOps

@@ -118,7 +119,28 @@ object ItemData {
         )
         .asJson,
       "tags" -> data.tags.asJson,
-      "assumedTags" -> data.classifyTags.asJson
+      "assumedTags" -> data.classifyTags.asJson,
+      "assumedCorrOrg" -> data.finalProposals
+        .find(MetaProposalType.CorrOrg)
+        .map(_.values.head.ref)
+        .asJson
     )
   }
+
+  implicit val jobTaskResultEncoder: JobTaskResultEncoder[ItemData] =
+    JobTaskResultEncoder.fromJson[ItemData].withMessage { data =>
+      val tags =
+        if (data.tags.isEmpty && data.classifyTags.isEmpty) ""
+        else (data.tags ++ data.classifyTags).mkString("[", ", ", "]")
+
+      val corg =
+        data.finalProposals.find(MetaProposalType.CorrOrg).map(_.values.head.ref.name)
+      val cpers =
+        data.finalProposals.find(MetaProposalType.CorrPerson).map(_.values.head.ref.name)
+      val org = corg match {
+        case Some(o) => s" by $o" + cpers.map(p => s"/$p").getOrElse("")
+        case None    => cpers.map(p => s" by $p").getOrElse("")
+      }
+      s"Processed '${data.item.name}' $tags$org"
+    }
 }
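A worked example of the message assembly above, with hypothetical stand-in values for the `ItemData` fields:

    val tags = List("invoice", "todo").mkString("[", ", ", "]")
    val corg = Option("Acme Corp")  // assumed correspondent organization
    val cpers = Option("Jane Doe")  // assumed correspondent person

    val org = corg match {
      case Some(o) => s" by $o" + cpers.map(p => s"/$p").getOrElse("")
      case None    => cpers.map(p => s" by $p").getOrElse("")
    }

    // prints: Processed 'invoice.pdf' [invoice, todo] by Acme Corp/Jane Doe
    println(s"Processed 'invoice.pdf' $tags$org")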
JobTask.scala

@@ -12,7 +12,7 @@ import cats.implicits._
 import docspell.common.Ident
 import docspell.common.syntax.all._

-import io.circe.{Decoder, Encoder, Json}
+import io.circe.Decoder

 /** Binds a Task to a name. This is required to lookup the code based on the taskName in
   * the RJob data and to execute it given the arguments that have to be read from a

@@ -24,7 +24,7 @@ import io.circe.{Decoder, Encoder, Json}
   */
 case class JobTask[F[_]](
     name: Ident,
-    task: Task[F, String, Json],
+    task: Task[F, String, JobTaskResult],
     onCancel: Task[F, String, Unit]
 )

@@ -36,7 +36,7 @@ object JobTask {
       onCancel: Task[F, A, Unit]
   )(implicit
       D: Decoder[A],
-      E: Encoder[B]
+      E: JobTaskResultEncoder[B]
   ): JobTask[F] = {
     val convert: String => F[A] =
       str =>

@@ -46,6 +46,6 @@ object JobTask {
           Sync[F].raiseError(new Exception(s"Cannot parse task arguments: $str", ex))
       }

-    JobTask(name, task.contramap(convert).map(E.apply), onCancel.contramap(convert))
+    JobTask(name, task.contramap(convert).map(E.encode), onCancel.contramap(convert))
   }
 }
JobTaskResult.scala (new file)

@@ -0,0 +1,27 @@
+/*
+ * Copyright 2020 Eike K. & Contributors
+ *
+ * SPDX-License-Identifier: AGPL-3.0-or-later
+ */
+
+package docspell.joex.scheduler
+
+import io.circe.Json
+
+final case class JobTaskResult(message: Option[String], json: Option[Json]) {
+
+  def withMessage(m: String): JobTaskResult =
+    copy(message = Some(m))
+
+  def withJson(json: Json): JobTaskResult =
+    copy(json = Some(json))
+}
+
+object JobTaskResult {
+
+  val empty: JobTaskResult = JobTaskResult(None, None)
+
+  def message(msg: String): JobTaskResult = JobTaskResult(Some(msg), None)
+
+  def json(json: Json): JobTaskResult = JobTaskResult(None, Some(json))
+}
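A quick REPL-style sketch of the constructors — data only, message only, or both:

    import io.circe.Json

    val dataOnly = JobTaskResult.json(Json.obj("ok" -> Json.fromInt(42)))
    val msgOnly = JobTaskResult.message("All 42 checks passed.")
    val both = dataOnly.withMessage("All 42 checks passed.")

    assert(both.json.nonEmpty && both.message.contains("All 42 checks passed."))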
JobTaskResultEncoder.scala (new file)

@@ -0,0 +1,49 @@
+/*
+ * Copyright 2020 Eike K. & Contributors
+ *
+ * SPDX-License-Identifier: AGPL-3.0-or-later
+ */
+
+package docspell.joex.scheduler
+
+import docspell.joex.scheduler.JobTaskResultEncoder.instance
+
+import io.circe.Encoder
+
+trait JobTaskResultEncoder[A] { self =>
+  def encode(a: A): JobTaskResult
+
+  final def contramap[B](f: B => A): JobTaskResultEncoder[B] =
+    JobTaskResultEncoder.instance(b => self.encode(f(b)))
+
+  final def map(f: JobTaskResult => JobTaskResult): JobTaskResultEncoder[A] =
+    instance(a => f(self.encode(a)))
+
+  final def modify(f: (A, JobTaskResult) => JobTaskResult): JobTaskResultEncoder[A] =
+    instance(a => f(a, self.encode(a)))
+
+  final def withMessage(f: A => String): JobTaskResultEncoder[A] =
+    modify((a, r) => r.withMessage(f(a)))
+}
+
+object JobTaskResultEncoder {
+
+  def apply[A](implicit v: JobTaskResultEncoder[A]): JobTaskResultEncoder[A] = v
+
+  def instance[A](f: A => JobTaskResult): JobTaskResultEncoder[A] =
+    (a: A) => f(a)
+
+  def fromJson[A: Encoder]: JobTaskResultEncoder[A] =
+    instance(a => JobTaskResult.json(Encoder[A].apply(a)))
+
+  implicit val unitJobTaskResultEncoder: JobTaskResultEncoder[Unit] =
+    instance(_ => JobTaskResult.empty)
+
+  implicit def optionJobTaskResultEncoder[A](implicit
+      ea: JobTaskResultEncoder[A]
+  ): JobTaskResultEncoder[Option[A]] =
+    instance {
+      case Some(a) => ea.encode(a)
+      case None    => JobTaskResult.empty
+    }
+}
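Since instances are contravariant via `contramap`, an existing encoder can be reused for a wrapping type; a sketch with hypothetical `Stats`/`Wrapped` types:

    import io.circe.Encoder
    import io.circe.generic.semiauto.deriveEncoder

    final case class Stats(processed: Int)
    object Stats {
      implicit val jsonEncoder: Encoder[Stats] = deriveEncoder
    }

    // Hypothetical wrapper reusing the Stats encoder via contramap:
    final case class Wrapped(stats: Stats)

    val statsEncoder: JobTaskResultEncoder[Stats] =
      JobTaskResultEncoder.fromJson[Stats].withMessage(s => s"Processed ${s.processed} items.")

    val wrappedEncoder: JobTaskResultEncoder[Wrapped] =
      statsEncoder.contramap(_.stats)

    // encode yields a JobTaskResult carrying both json and message:
    assert(wrappedEncoder.encode(Wrapped(Stats(7))).message.contains("Processed 7 items."))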
SchedulerImpl.scala

@@ -167,7 +167,7 @@ final class SchedulerImpl[F[_]: Async](
       ctx <- Context[F, String](job, job.args, config, logSink, store)
       _ <- t.onCancel.run(ctx)
       _ <- state.modify(_.markCancelled(job))
-      _ <- onFinish(job, Json.Null, JobState.Cancelled)
+      _ <- onFinish(job, JobTaskResult.empty, JobState.Cancelled)
       _ <- ctx.logger.warn("Job has been cancelled.")
       _ <- logger.debug(s"Job ${job.info} has been cancelled.")
     } yield ()

@@ -196,7 +196,7 @@ final class SchedulerImpl[F[_]: Async](
       }
     }

-  def onFinish(job: RJob, result: Json, finishState: JobState): F[Unit] =
+  def onFinish(job: RJob, result: JobTaskResult, finishState: JobState): F[Unit] =
     for {
       _ <- logger.debug(s"Job ${job.info} done $finishState. Releasing resources.")
       _ <- permits.release *> permits.available.flatMap(a =>

@@ -220,7 +220,8 @@ final class SchedulerImpl[F[_]: Async](
                 job.state,
                 job.subject,
                 job.submitter,
-                result
+                result.json.getOrElse(Json.Null),
+                result.message
               )
             )
           )

@@ -235,7 +236,7 @@ final class SchedulerImpl[F[_]: Async](

   def wrapTask(
       job: RJob,
-      task: Task[F, String, Json],
+      task: Task[F, String, JobTaskResult],
       ctx: Context[F, String]
   ): Task[F, String, Unit] =
     task

@@ -250,19 +251,19 @@ final class SchedulerImpl[F[_]: Async](
           case true =>
             logger.error(ex)(s"Job ${job.info} execution failed (cancel = true)")
             ctx.logger.error(ex)("Job execution failed (cancel = true)") *>
-              (JobState.Cancelled: JobState, Json.Null).pure[F]
+              (JobState.Cancelled: JobState, JobTaskResult.empty).pure[F]
           case false =>
             QJob.exceedsRetries(job.id, config.retries, store).flatMap {
               case true =>
                 logger.error(ex)(s"Job ${job.info} execution failed. Retries exceeded.")
                 ctx.logger
                   .error(ex)(s"Job ${job.info} execution failed. Retries exceeded.")
-                  .map(_ => (JobState.Failed: JobState, Json.Null))
+                  .map(_ => (JobState.Failed: JobState, JobTaskResult.empty))
               case false =>
                 logger.error(ex)(s"Job ${job.info} execution failed. Retrying later.")
                 ctx.logger
                   .error(ex)(s"Job ${job.info} execution failed. Retrying later.")
-                  .map(_ => (JobState.Stuck: JobState, Json.Null))
+                  .map(_ => (JobState.Stuck: JobState, JobTaskResult.empty))
             }
         }
       })

@@ -273,7 +274,7 @@ final class SchedulerImpl[F[_]: Async](
         logger.error(ex)(s"Error happened during post-processing of ${job.info}!")
         // we don't know the real outcome here…
         // since tasks should be idempotent, set it to stuck. if above has failed, this might fail anyways
-        onFinish(job, Json.Null, JobState.Stuck)
+        onFinish(job, JobTaskResult.empty, JobState.Stuck)
       })

   def forkRun(

@@ -295,7 +296,7 @@ final class SchedulerImpl[F[_]: Async](
               ()
             } *>
             state.modify(_.markCancelled(job)) *>
-            onFinish(job, Json.Null, JobState.Cancelled) *>
+            onFinish(job, JobTaskResult.empty, JobState.Cancelled) *>
             ctx.logger.warn("Job has been cancelled.") *>
             logger.debug(s"Job ${job.info} has been cancelled.")
           )
Event.scala

@@ -204,7 +204,8 @@ object Event {
       state: JobState,
       subject: String,
       submitter: Ident,
-      result: Json
+      resultData: Json,
+      resultMsg: Option[String]
   ) extends Event {
     val eventType = JobDone
     val baseUrl = None

@@ -222,7 +223,8 @@ object Event {
         JobState.running,
         "Process 3 files",
         account.user,
-        Json.Null
+        Json.Null,
+        None
       )
     } yield ev
   }
EventContext.scala

@@ -31,30 +31,25 @@ trait EventContext {
       "content" -> content
     )

-  def defaultTitle: Either[String, String]
-  def defaultTitleHtml: Either[String, String]
+  def defaultMessage: Either[String, EventMessage]
+  def defaultMessageHtml: Either[String, EventMessage]

-  def defaultBody: Either[String, String]
-  def defaultBodyHtml: Either[String, String]
-
   def defaultBoth: Either[String, String]
   def defaultBothHtml: Either[String, String]

   lazy val asJsonWithMessage: Either[String, Json] =
     for {
-      tt1 <- defaultTitle
-      tb1 <- defaultBody
-      tt2 <- defaultTitleHtml
-      tb2 <- defaultBodyHtml
+      dm1 <- defaultMessage
+      dm2 <- defaultMessageHtml
       data = asJson
       msg = Json.obj(
         "message" -> Json.obj(
-          "title" -> tt1.asJson,
-          "body" -> tb1.asJson
+          "title" -> dm1.title.asJson,
+          "body" -> dm1.body.asJson
         ),
         "messageHtml" -> Json.obj(
-          "title" -> tt2.asJson,
-          "body" -> tb2.asJson
+          "title" -> dm2.title.asJson,
+          "body" -> dm2.body.asJson
         )
       )
     } yield data.withObject(o1 => msg.withObject(o2 => o1.deepMerge(o2).asJson))

@@ -65,10 +60,8 @@ object EventContext {
     new EventContext {
       val event = ev
       def content = Json.obj()
-      def defaultTitle = Right("")
-      def defaultTitleHtml = Right("")
-      def defaultBody = Right("")
-      def defaultBodyHtml = Right("")
+      def defaultMessage = Right(EventMessage.empty)
+      def defaultMessageHtml = Right(EventMessage.empty)
       def defaultBoth = Right("")
       def defaultBothHtml = Right("")
     }
EventMessage.scala (new file)

@@ -0,0 +1,13 @@
+/*
+ * Copyright 2020 Eike K. & Contributors
+ *
+ * SPDX-License-Identifier: AGPL-3.0-or-later
+ */
+
+package docspell.notification.api
+
+final case class EventMessage(title: String, body: String)
+
+object EventMessage {
+  val empty: EventMessage = EventMessage("", "")
+}
AbstractEventContext.scala

@@ -6,7 +6,7 @@

 package docspell.notification.impl

-import docspell.notification.api.EventContext
+import docspell.notification.api.{EventContext, EventMessage}

 import yamusca.circe._
 import yamusca.implicits._

@@ -24,17 +24,17 @@ abstract class AbstractEventContext extends EventContext {
   def renderHtml(template: Template): String =
     Markdown.toHtml(render(template))

-  lazy val defaultTitle: Either[String, String] =
-    titleTemplate.map(render)
+  lazy val defaultMessage: Either[String, EventMessage] =
+    for {
+      title <- titleTemplate.map(render)
+      body <- bodyTemplate.map(render)
+    } yield EventMessage(title, body)

-  lazy val defaultTitleHtml: Either[String, String] =
-    titleTemplate.map(renderHtml)
-
-  lazy val defaultBody: Either[String, String] =
-    bodyTemplate.map(render)
-
-  lazy val defaultBodyHtml: Either[String, String] =
-    bodyTemplate.map(renderHtml)
+  lazy val defaultMessageHtml: Either[String, EventMessage] =
+    for {
+      title <- titleTemplate.map(renderHtml)
+      body <- bodyTemplate.map(renderHtml)
+    } yield EventMessage(title, body)

   lazy val defaultBoth: Either[String, String] =
     for {
EventContextSyntax.scala

@@ -18,8 +18,9 @@ trait EventContextSyntax {
   implicit final class EventContextOps(self: EventContext) {
     def withDefault[F[_]](logger: Logger[F])(f: (String, String) => F[Unit]): F[Unit] =
       (for {
-        tt <- self.defaultTitle
-        tb <- self.defaultBody
+        dm <- self.defaultMessage
+        tt = dm.title
+        tb = dm.body
       } yield f(tt, tb)).fold(logError(logger), identity)

     def withJsonMessage[F[_]](logger: Logger[F])(f: Json => F[Unit]): F[Unit] =
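Callers of `withDefault` still receive a plain `(title, body)` pair; both strings now come from the single `defaultMessage` value. A REPL-style sketch of that unpacking, using a hypothetical message:

    import docspell.notification.api.EventMessage

    // Mirrors what withDefault now does: one Either yields both strings.
    val defaultMessage: Either[String, EventMessage] =
      Right(EventMessage("TagsChanged (by *user2*)", "Adding *tag-red*."))

    val rendered = for {
      dm <- defaultMessage
      tt = dm.title
      tb = dm.body
    } yield s"$tt -- $tb"

    // rendered == Right("TagsChanged (by *user2*) -- Adding *tag-red*.")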
JobDoneCtx.scala

@@ -23,9 +23,14 @@ final case class JobDoneCtx(event: Event.JobDone, data: JobDoneCtx.Data)
   val content = data.asJson

   val titleTemplate = Right(mustache"{{eventType}} (by *{{account.user}}*)")
-  val bodyTemplate = Right(
-    mustache"""{{#content}}_'{{subject}}'_ finished {{/content}}"""
-  )
+  val bodyTemplate =
+    data.resultMsg match {
+      case None =>
+        Right(mustache"""{{#content}}_'{{subject}}'_ finished {{/content}}""")
+      case Some(msg) =>
+        val tpl = s"""{{#content}}$msg{{/content}}"""
+        yamusca.imports.mustache.parse(tpl).left.map(_._2)
+    }
 }

 object JobDoneCtx {

@@ -46,7 +51,8 @@ object JobDoneCtx {
       state: JobState,
       subject: String,
       submitter: Ident,
-      result: Json
+      resultData: Json,
+      resultMsg: Option[String]
   )
   object Data {
     implicit val jsonEncoder: Encoder[Data] =

@@ -61,7 +67,8 @@ object JobDoneCtx {
         ev.state,
         ev.subject,
         ev.submitter,
-        ev.result
+        ev.resultData,
+        ev.resultMsg
       )
   }
 }
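Note that a task-supplied message is itself parsed as a mustache template after being wrapped in a `{{#content}}…{{/content}}` section. A REPL-style sketch of that path, with a hypothetical message:

    import yamusca.imports._

    val msg = "Successfully copied 5 files to 1 stores."  // hypothetical resultMsg
    val tpl = s"""{{#content}}$msg{{/content}}"""

    // Same call as in bodyTemplate above; the Left side of parse carries the
    // error position and text, and .left.map(_._2) keeps only the text.
    val bodyTemplate = mustache.parse(tpl).left.map(_._2)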
TagsChangedCtxTest.scala

@@ -46,9 +46,10 @@ class TagsChangedCtxTest extends FunSuite {
       TagsChangedCtx.Data(account, List(item), List(tag), Nil, url.some.map(_.asString))
     )

-    assertEquals(ctx.defaultTitle.toOption.get, "TagsChanged (by *user2*)")
+    val dm = ctx.defaultMessage.toOption.get
+    assertEquals(dm.title, "TagsChanged (by *user2*)")
     assertEquals(
-      ctx.defaultBody.toOption.get,
+      dm.body,
       "Adding *tag-red* on [`Report 2`](http://test/item-1)."
     )
   }

@@ -65,9 +66,10 @@ class TagsChangedCtxTest extends FunSuite {
       )
     )

-    assertEquals(ctx.defaultTitle.toOption.get, "TagsChanged (by *user2*)")
+    val dm = ctx.defaultMessage.toOption.get
+    assertEquals(dm.title, "TagsChanged (by *user2*)")
     assertEquals(
-      ctx.defaultBody.toOption.get,
+      dm.body,
       "Adding *tag-red*; Removing *tag-blue* on [`Report 2`](http://test/item-1)."
     )
   }
docspell-openapi.yml

@@ -2516,6 +2516,30 @@ paths:
             schema:
               $ref: "#/components/schemas/BasicResult"

+  /admin/files/integrityCheck:
+    post:
+      operationId: "admin-files-integrityCheck"
+      tags: [ Admin ]
+      summary: Verifies the stored checksum
+      description: |
+        Submits a task that goes through the files and compares the
+        stored checksum (at the time of inserting) against a newly
+        calculated one.
+      security:
+        - adminHeader: []
+      requestBody:
+        content:
+          application/json:
+            schema:
+              $ref: "#/components/schemas/FileIntegrityCheckRequest"
+      responses:
+        200:
+          description: Ok
+          content:
+            application/json:
+              schema:
+                $ref: "#/components/schemas/BasicResult"
+
   /sec/source:
     get:
       operationId: "sec-source-get-all"

@@ -5462,6 +5486,14 @@ paths:

 components:
   schemas:
+    FileIntegrityCheckRequest:
+      description: |
+        Data for running a file integrity check
+      properties:
+        collective:
+          type: string
+          format: ident
+
     FileRepositoryCloneRequest:
       description: |
         Clone the file repository to a new location.
FileRepositoryRoutes.scala

@@ -43,7 +43,7 @@ object FileRepositoryRoutes {
        resp <- Ok(result)
      } yield resp

-    case req @ POST -> Root / "integrityCheckAll" =>
+    case req @ POST -> Root / "integrityCheck" =>
      for {
        input <- req.as[FileKeyPart]
        job <- backend.fileRepository.checkIntegrityAll(input, true)