Merge pull request #1214 from eikek/feature/notifications

Add support for more generic notifications
mergify[bot] 2021-12-11 18:13:51 +00:00 committed by GitHub
commit 61379ffff7
175 changed files with 13041 additions and 599 deletions


@ -0,0 +1,5 @@
# This file instructs Redocly's linter to ignore the rules contained for specific parts of your API.
# See https://redoc.ly/docs/cli/ for more information.
modules/restapi/src/main/resources/docspell-openapi.yml:
spec:
- '#/extraSchemas'


@ -20,7 +20,7 @@ val scalafixSettings = Seq(
val sharedSettings = Seq(
organization := "com.github.eikek",
scalaVersion := "2.13.6",
scalaVersion := "2.13.7",
organizationName := "Eike K. & Contributors",
licenses += ("AGPL-3.0-or-later", url(
"https://spdx.org/licenses/AGPL-3.0-or-later.html"
@ -41,7 +41,8 @@ val sharedSettings = Seq(
"-Wdead-code",
"-Wunused",
"-Wvalue-discard",
"-Wnumeric-widen"
"-Wnumeric-widen",
"-Ywarn-macros:after"
),
javacOptions ++= Seq("-target", "1.8", "-source", "1.8"),
LocalRootProject / toolsPackage := {
@ -272,6 +273,22 @@ val openapiScalaSettings = Seq(
)
)
)
case "channeltype" =>
field =>
field.copy(typeDef =
TypeDef("ChannelType", Imports("docspell.notification.api.ChannelType"))
)
case "eventtype" =>
field =>
field.copy(typeDef =
TypeDef("EventType", Imports("docspell.notification.api.EventType"))
)
case "jsonminiq" =>
field =>
field.copy(typeDef =
TypeDef("JsonMiniQuery", Imports("docspell.jsonminiq.JsonMiniQuery"))
)
})
)
@ -385,6 +402,34 @@ val totp = project
Dependencies.circe
)
val jsonminiq = project
.in(file("modules/jsonminiq"))
.disablePlugins(RevolverPlugin)
.settings(sharedSettings)
.settings(testSettingsMUnit)
.settings(
name := "docspell-jsonminiq",
libraryDependencies ++=
Dependencies.circeCore ++
Dependencies.catsParse ++
Dependencies.circe.map(_ % Test)
)
val notificationApi = project
.in(file("modules/notification/api"))
.disablePlugins(RevolverPlugin)
.settings(sharedSettings)
.settings(testSettingsMUnit)
.settings(
name := "docspell-notification-api",
addCompilerPlugin(Dependencies.kindProjectorPlugin),
libraryDependencies ++=
Dependencies.fs2 ++
Dependencies.emilCommon ++
Dependencies.circeGenericExtra
)
.dependsOn(common)
val store = project
.in(file("modules/store"))
.disablePlugins(RevolverPlugin)
@ -408,7 +453,27 @@ val store = project
libraryDependencies ++=
Dependencies.testContainer.map(_ % Test)
)
.dependsOn(common, query.jvm, totp, files)
.dependsOn(common, query.jvm, totp, files, notificationApi, jsonminiq)
val notificationImpl = project
.in(file("modules/notification/impl"))
.disablePlugins(RevolverPlugin)
.settings(sharedSettings)
.settings(testSettingsMUnit)
.settings(
name := "docspell-notification-impl",
addCompilerPlugin(Dependencies.kindProjectorPlugin),
libraryDependencies ++=
Dependencies.fs2 ++
Dependencies.emil ++
Dependencies.emilMarkdown ++
Dependencies.http4sClient ++
Dependencies.http4sCirce ++
Dependencies.http4sDsl ++
Dependencies.yamusca ++
Dependencies.yamuscaCirce
)
.dependsOn(notificationApi, store, jsonminiq)
val pubsubApi = project
.in(file("modules/pubsub/api"))
@ -522,13 +587,13 @@ val restapi = project
.settings(
name := "docspell-restapi",
libraryDependencies ++=
Dependencies.circe,
Dependencies.circe ++ Dependencies.emil,
openapiTargetLanguage := Language.Scala,
openapiPackage := Pkg("docspell.restapi.model"),
openapiSpec := (Compile / resourceDirectory).value / "docspell-openapi.yml",
openapiStaticGen := OpenApiDocGenerator.Redoc
)
.dependsOn(common, query.jvm)
.dependsOn(common, query.jvm, notificationApi, jsonminiq)
val joexapi = project
.in(file("modules/joexapi"))
@ -564,7 +629,7 @@ val backend = project
Dependencies.http4sClient ++
Dependencies.emil
)
.dependsOn(store, joexapi, ftsclient, totp, pubsubApi)
.dependsOn(store, notificationApi, joexapi, ftsclient, totp, pubsubApi)
val oidc = project
.in(file("modules/oidc"))
@ -656,7 +721,8 @@ val joex = project
joexapi,
restapi,
ftssolr,
pubsubNaive
pubsubNaive,
notificationImpl
)
val restserver = project
@ -720,7 +786,17 @@ val restserver = project
}
}
)
.dependsOn(config, restapi, joexapi, backend, webapp, ftssolr, oidc, pubsubNaive)
.dependsOn(
config,
restapi,
joexapi,
backend,
webapp,
ftssolr,
oidc,
pubsubNaive,
notificationImpl
)
// --- Website Documentation
@ -811,10 +887,13 @@ val root = project
restserver,
query.jvm,
query.js,
jsonminiq,
totp,
oidc,
pubsubApi,
pubsubNaive
pubsubNaive,
notificationApi,
notificationImpl
)
// --- Helpers


@ -0,0 +1,44 @@
/*
* Copyright 2020 Eike K. & Contributors
*
* SPDX-License-Identifier: AGPL-3.0-or-later
*/
package docspell.backend
import docspell.common._
import docspell.notification.api.Event
trait AttachedEvent[R] {
def value: R
def event(account: AccountId, baseUrl: Option[LenientUri]): Iterable[Event]
def map[U](f: R => U): AttachedEvent[U]
}
object AttachedEvent {
def only[R](v: R): AttachedEvent[R] =
new AttachedEvent[R] {
val value = v
def event(account: AccountId, baseUrl: Option[LenientUri]): Iterable[Event] =
Iterable.empty[Event]
def map[U](f: R => U): AttachedEvent[U] =
only(f(v))
}
def apply[R](
v: R
)(mkEvent: (AccountId, Option[LenientUri]) => Event): AttachedEvent[R] =
new AttachedEvent[R] {
val value = v
def event(account: AccountId, baseUrl: Option[LenientUri]): Iterable[Event] =
Some(mkEvent(account, baseUrl))
def map[U](f: R => U): AttachedEvent[U] =
apply(f(v))(mkEvent)
}
}
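A minimal, hypothetical sketch of the contract: an operation returns its value together with a function that yields the event once account and base url are known. Event.TagsChanged.partial is used exactly as in OItem further down.

import cats.data.{NonEmptyList => Nel}

import docspell.backend.AttachedEvent
import docspell.common._
import docspell.notification.api.Event

// Hypothetical operation: report how many tags were attached and describe
// the change as a TagsChanged event.
def tagged(item: Ident, tagIds: List[Ident]): AttachedEvent[Int] =
  AttachedEvent(tagIds.size)(
    Event.TagsChanged.partial(Nel.of(item), tagIds.map(_.id), Nil)
  )

// Callers unwrap both sides:
//   val res    = tagged(itemId, ids)
//   val events = res.event(account, baseUrl)  // Iterable[Event] for the notification sink
//   val count  = res.value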


@ -14,12 +14,13 @@ import docspell.backend.msg.JobQueuePublish
import docspell.backend.ops._
import docspell.backend.signup.OSignup
import docspell.ftsclient.FtsClient
import docspell.notification.api.{EventExchange, NotificationModule}
import docspell.pubsub.api.PubSubT
import docspell.store.Store
import docspell.store.usertask.UserTaskStore
import docspell.totp.Totp
import emil.javamail.{JavaMailEmil, Settings}
import emil.Emil
trait BackendApp[F[_]] {
@ -46,19 +47,22 @@ trait BackendApp[F[_]] {
def totp: OTotp[F]
def share: OShare[F]
def pubSub: PubSubT[F]
def events: EventExchange[F]
def notification: ONotification[F]
}
object BackendApp {
def create[F[_]: Async](
cfg: Config,
store: Store[F],
javaEmil: Emil[F],
ftsClient: FtsClient[F],
pubSubT: PubSubT[F]
pubSubT: PubSubT[F],
notificationMod: NotificationModule[F]
): Resource[F, BackendApp[F]] =
for {
utStore <- UserTaskStore(store)
queue <- JobQueuePublish(store, pubSubT)
queue <- JobQueuePublish(store, pubSubT, notificationMod)
totpImpl <- OTotp(store, Totp.default)
loginImpl <- Login[F](store, Totp.default)
signupImpl <- OSignup[F](store)
@ -75,8 +79,6 @@ object BackendApp {
itemImpl <- OItem(store, ftsClient, createIndex, queue, joexImpl)
itemSearchImpl <- OItemSearch(store)
fulltextImpl <- OFulltext(itemSearchImpl, ftsClient, store, queue, joexImpl)
javaEmil =
JavaMailEmil(Settings.defaultSettings.copy(debug = cfg.mailDebug))
mailImpl <- OMail(store, javaEmil)
userTaskImpl <- OUserTask(utStore, queue, joexImpl)
folderImpl <- OFolder(store)
@ -86,6 +88,7 @@ object BackendApp {
shareImpl <- Resource.pure(
OShare(store, itemSearchImpl, simpleSearchImpl, javaEmil)
)
notifyImpl <- ONotification(store, notificationMod)
} yield new BackendApp[F] {
val pubSub = pubSubT
val login = loginImpl
@ -110,5 +113,7 @@ object BackendApp {
val clientSettings = clientSettingsImpl
val totp = totpImpl
val share = shareImpl
val events = notificationMod
val notification = notifyImpl
}
}


@ -10,12 +10,18 @@ import docspell.backend.signup.{Config => SignupConfig}
import docspell.common._
import docspell.store.JdbcConfig
import emil.javamail.Settings
case class Config(
mailDebug: Boolean,
jdbc: JdbcConfig,
signup: SignupConfig,
files: Config.Files
) {}
) {
def mailSettings: Settings =
Settings.defaultSettings.copy(debug = mailDebug)
}
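With the mail client no longer constructed inside BackendApp, callers can derive it from the config. A rough sketch only, mirroring the construction shown later in JoexAppImpl:

import cats.effect.Async

import emil.Emil
import emil.javamail.JavaMailEmil

// Sketch: build the Emil client from the Config above and pass it to
// BackendApp.create, which now takes it as a parameter.
def mkEmil[F[_]: Async](cfg: Config): Emil[F] =
  JavaMailEmil(cfg.mailSettings)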
object Config {


@ -9,10 +9,29 @@ package docspell.backend
import cats.effect._
import cats.implicits._
import docspell.backend.MailAddressCodec
import docspell.common._
import docspell.notification.api.ChannelOrRef._
import docspell.notification.api.PeriodicQueryArgs
import docspell.store.records.RJob
object JobFactory {
object JobFactory extends MailAddressCodec {
def periodicQuery[F[_]: Sync](args: PeriodicQueryArgs, submitter: AccountId): F[RJob] =
for {
id <- Ident.randomId[F]
now <- Timestamp.current[F]
job = RJob.newJob(
id,
PeriodicQueryArgs.taskName,
submitter.collective,
args,
s"Running periodic query, notify via ${args.channel.channelType}",
now,
submitter.user,
Priority.Low,
None
)
} yield job
def makePageCount[F[_]: Sync](
args: MakePageCountArgs,


@ -0,0 +1,22 @@
/*
* Copyright 2020 Eike K. & Contributors
*
* SPDX-License-Identifier: AGPL-3.0-or-later
*/
package docspell.backend
import emil._
import emil.javamail.syntax._
import io.circe.{Decoder, Encoder}
trait MailAddressCodec {
implicit val jsonEncoder: Encoder[MailAddress] =
Encoder.encodeString.contramap(_.asUnicodeString)
implicit val jsonDecoder: Decoder[MailAddress] =
Decoder.decodeString.emap(MailAddress.parse)
}
object MailAddressCodec extends MailAddressCodec
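A REPL-style sketch of the codec in use; the address is a made-up example:

import emil._
import emil.javamail.syntax._
import io.circe.syntax._

import docspell.backend.MailAddressCodec._

val addr: MailAddress =
  MailAddress.parse("John Doe <john.doe@example.com>").fold(sys.error, identity)

val json = addr.asJson          // a JSON string built via asUnicodeString
val back = json.as[MailAddress] // Right(addr)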


@ -275,8 +275,8 @@ object Login {
token <- RememberToken.user(rme.id, config.serverSecret)
} yield token
private def check(given: String)(data: QLogin.Data): Boolean = {
val passOk = BCrypt.checkpw(given, data.password.pass)
private def check(givenPass: String)(data: QLogin.Data): Boolean = {
val passOk = BCrypt.checkpw(givenPass, data.password.pass)
checkNoPassword(data, Set(AccountSource.Local)) && passOk
}


@ -10,19 +10,36 @@ import cats.effect._
import cats.implicits._
import docspell.common.{Duration, Ident, Priority}
import docspell.notification.api.Event
import docspell.notification.api.EventSink
import docspell.pubsub.api.PubSubT
import docspell.store.Store
import docspell.store.queue.JobQueue
import docspell.store.records.RJob
final class JobQueuePublish[F[_]: Sync](delegate: JobQueue[F], pubsub: PubSubT[F])
extends JobQueue[F] {
final class JobQueuePublish[F[_]: Sync](
delegate: JobQueue[F],
pubsub: PubSubT[F],
eventSink: EventSink[F]
) extends JobQueue[F] {
private def msg(job: RJob): JobSubmitted =
JobSubmitted(job.id, job.group, job.task, job.args)
private def event(job: RJob): Event.JobSubmitted =
Event.JobSubmitted(
job.id,
job.group,
job.task,
job.args,
job.state,
job.subject,
job.submitter
)
private def publish(job: RJob): F[Unit] =
pubsub.publish1(JobSubmitted.topic, msg(job)).as(())
pubsub.publish1(JobSubmitted.topic, msg(job)).as(()) *>
eventSink.offer(event(job))
def insert(job: RJob) =
delegate.insert(job).flatTap(_ => publish(job))
@ -54,6 +71,10 @@ final class JobQueuePublish[F[_]: Sync](delegate: JobQueue[F], pubsub: PubSubT[F
}
object JobQueuePublish {
def apply[F[_]: Async](store: Store[F], pubSub: PubSubT[F]): Resource[F, JobQueue[F]] =
JobQueue(store).map(q => new JobQueuePublish[F](q, pubSub))
def apply[F[_]: Async](
store: Store[F],
pubSub: PubSubT[F],
eventSink: EventSink[F]
): Resource[F, JobQueue[F]] =
JobQueue(store).map(q => new JobQueuePublish[F](q, pubSub, eventSink))
}


@ -12,6 +12,7 @@ import cats.data.{NonEmptyList => Nel}
import cats.effect._
import cats.implicits._
import docspell.backend.AttachedEvent
import docspell.backend.ops.OCustomFields.CustomFieldData
import docspell.backend.ops.OCustomFields.CustomFieldOrder
import docspell.backend.ops.OCustomFields.FieldValue
@ -20,6 +21,7 @@ import docspell.backend.ops.OCustomFields.RemoveValue
import docspell.backend.ops.OCustomFields.SetValue
import docspell.backend.ops.OCustomFields.SetValueResult
import docspell.common._
import docspell.notification.api.Event
import docspell.store.AddResult
import docspell.store.Store
import docspell.store.UpdateResult
@ -53,12 +55,15 @@ trait OCustomFields[F[_]] {
def delete(coll: Ident, fieldIdOrName: Ident): F[UpdateResult]
/** Sets a value given a field and an item. Existing values are overwritten. */
def setValue(item: Ident, value: SetValue): F[SetValueResult]
def setValue(item: Ident, value: SetValue): F[AttachedEvent[SetValueResult]]
def setValueMultiple(items: Nel[Ident], value: SetValue): F[SetValueResult]
def setValueMultiple(
items: Nel[Ident],
value: SetValue
): F[AttachedEvent[SetValueResult]]
/** Deletes a value for a given field and item. */
def deleteValue(in: RemoveValue): F[UpdateResult]
def deleteValue(in: RemoveValue): F[AttachedEvent[UpdateResult]]
/** Finds all values for the given items */
def findAllValues(itemIds: Nel[Ident]): F[List[FieldValue]]
@ -196,13 +201,13 @@ object OCustomFields {
UpdateResult.fromUpdate(store.transact(update.getOrElse(0)))
}
def setValue(item: Ident, value: SetValue): F[SetValueResult] =
def setValue(item: Ident, value: SetValue): F[AttachedEvent[SetValueResult]] =
setValueMultiple(Nel.of(item), value)
def setValueMultiple(
items: Nel[Ident],
value: SetValue
): F[SetValueResult] =
): F[AttachedEvent[SetValueResult]] =
(for {
field <- EitherT.fromOptionF(
store.transact(RCustomField.findByIdOrName(value.field, value.collective)),
@ -224,17 +229,24 @@ object OCustomFields {
.traverse(item => store.transact(RCustomField.setValue(field, item, fval)))
.map(_.toList.sum)
)
} yield nu).fold(identity, _ => SetValueResult.success)
mkEvent =
Event.SetFieldValue.partial(items, field.id, fval)
def deleteValue(in: RemoveValue): F[UpdateResult] = {
} yield AttachedEvent(SetValueResult.success)(mkEvent))
.fold(AttachedEvent.only, identity)
def deleteValue(in: RemoveValue): F[AttachedEvent[UpdateResult]] = {
val update =
for {
(for {
field <- OptionT(RCustomField.findByIdOrName(in.field, in.collective))
_ <- OptionT.liftF(logger.debug(s"Field found by '${in.field}': $field"))
n <- OptionT.liftF(RCustomFieldValue.deleteValue(field.id, in.item))
} yield n
mkEvent = Event.DeleteFieldValue.partial(in.item, field.id)
} yield AttachedEvent(n)(mkEvent))
.getOrElse(AttachedEvent.only(0))
.map(_.map(UpdateResult.fromUpdateRows))
UpdateResult.fromUpdate(store.transact(update.getOrElse(0)))
store.transact(update)
}
})
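Call sites now receive the event next to the result. A sketch (all parameters assumed to be available at the call site) of how a caller can run the operation and hand the attached events to the notification module:

import cats.effect.Sync
import cats.implicits._

import docspell.backend.ops.OCustomFields.{SetValue, SetValueResult}
import docspell.backend.ops.{OCustomFields, ONotification}
import docspell.common._

// Sketch: set the field value, then offer the resulting events.
def setFieldValue[F[_]: Sync](
    fields: OCustomFields[F],
    notify: ONotification[F],
    account: AccountId,
    baseUrl: Option[LenientUri]
)(item: Ident, value: SetValue): F[SetValueResult] =
  for {
    res <- fields.setValue(item, value)
    _ <- notify.offerEvents(res.event(account, baseUrl))
  } yield res.value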


@ -6,15 +6,17 @@
package docspell.backend.ops
import cats.data.{NonEmptyList, OptionT}
import cats.data.{NonEmptyList => Nel, OptionT}
import cats.effect.{Async, Resource}
import cats.implicits._
import docspell.backend.AttachedEvent
import docspell.backend.JobFactory
import docspell.backend.fulltext.CreateIndex
import docspell.backend.item.Merge
import docspell.common._
import docspell.ftsclient.FtsClient
import docspell.notification.api.Event
import docspell.store.queries.{QAttachment, QItem, QMoveAttachment}
import docspell.store.queue.JobQueue
import docspell.store.records._
@ -26,42 +28,54 @@ import org.log4s.getLogger
trait OItem[F[_]] {
/** Sets the given tags (removing all existing ones). */
def setTags(item: Ident, tagIds: List[String], collective: Ident): F[UpdateResult]
def setTags(
item: Ident,
tagIds: List[String],
collective: Ident
): F[AttachedEvent[UpdateResult]]
/** Sets tags for multiple items. The tags of the items will be replaced with the given
* ones. Same as `setTags` but for multiple items.
*/
def setTagsMultipleItems(
items: NonEmptyList[Ident],
items: Nel[Ident],
tags: List[String],
collective: Ident
): F[UpdateResult]
): F[AttachedEvent[UpdateResult]]
/** Create a new tag and add it to the item. */
def addNewTag(item: Ident, tag: RTag): F[AddResult]
def addNewTag(collective: Ident, item: Ident, tag: RTag): F[AttachedEvent[AddResult]]
/** Apply all tags to the given item. Tags must exist, but can be IDs or names. Existing
* tags on the item are left unchanged.
*/
def linkTags(item: Ident, tags: List[String], collective: Ident): F[UpdateResult]
def linkTags(
item: Ident,
tags: List[String],
collective: Ident
): F[AttachedEvent[UpdateResult]]
def linkTagsMultipleItems(
items: NonEmptyList[Ident],
items: Nel[Ident],
tags: List[String],
collective: Ident
): F[UpdateResult]
): F[AttachedEvent[UpdateResult]]
def removeTagsMultipleItems(
items: NonEmptyList[Ident],
items: Nel[Ident],
tags: List[String],
collective: Ident
): F[UpdateResult]
): F[AttachedEvent[UpdateResult]]
/** Toggles tags of the given item. Tags must exist, but can be IDs or names. */
def toggleTags(item: Ident, tags: List[String], collective: Ident): F[UpdateResult]
def toggleTags(
item: Ident,
tags: List[String],
collective: Ident
): F[AttachedEvent[UpdateResult]]
def setDirection(
item: NonEmptyList[Ident],
item: Nel[Ident],
direction: Direction,
collective: Ident
): F[UpdateResult]
@ -69,13 +83,13 @@ trait OItem[F[_]] {
def setFolder(item: Ident, folder: Option[Ident], collective: Ident): F[UpdateResult]
def setFolderMultiple(
items: NonEmptyList[Ident],
items: Nel[Ident],
folder: Option[Ident],
collective: Ident
): F[UpdateResult]
def setCorrOrg(
items: NonEmptyList[Ident],
items: Nel[Ident],
org: Option[Ident],
collective: Ident
): F[UpdateResult]
@ -83,7 +97,7 @@ trait OItem[F[_]] {
def addCorrOrg(item: Ident, org: OOrganization.OrgAndContacts): F[AddResult]
def setCorrPerson(
items: NonEmptyList[Ident],
items: Nel[Ident],
person: Option[Ident],
collective: Ident
): F[UpdateResult]
@ -91,7 +105,7 @@ trait OItem[F[_]] {
def addCorrPerson(item: Ident, person: OOrganization.PersonAndContacts): F[AddResult]
def setConcPerson(
items: NonEmptyList[Ident],
items: Nel[Ident],
person: Option[Ident],
collective: Ident
): F[UpdateResult]
@ -99,7 +113,7 @@ trait OItem[F[_]] {
def addConcPerson(item: Ident, person: OOrganization.PersonAndContacts): F[AddResult]
def setConcEquip(
items: NonEmptyList[Ident],
items: Nel[Ident],
equip: Option[Ident],
collective: Ident
): F[UpdateResult]
@ -111,30 +125,30 @@ trait OItem[F[_]] {
def setName(item: Ident, name: String, collective: Ident): F[UpdateResult]
def setNameMultiple(
items: NonEmptyList[Ident],
items: Nel[Ident],
name: String,
collective: Ident
): F[UpdateResult]
def setState(item: Ident, state: ItemState, collective: Ident): F[AddResult] =
setStates(NonEmptyList.of(item), state, collective)
setStates(Nel.of(item), state, collective)
def setStates(
item: NonEmptyList[Ident],
item: Nel[Ident],
state: ItemState,
collective: Ident
): F[AddResult]
def restore(items: NonEmptyList[Ident], collective: Ident): F[UpdateResult]
def restore(items: Nel[Ident], collective: Ident): F[UpdateResult]
def setItemDate(
item: NonEmptyList[Ident],
item: Nel[Ident],
date: Option[Timestamp],
collective: Ident
): F[UpdateResult]
def setItemDueDate(
item: NonEmptyList[Ident],
item: Nel[Ident],
date: Option[Timestamp],
collective: Ident
): F[UpdateResult]
@ -143,14 +157,14 @@ trait OItem[F[_]] {
def deleteItem(itemId: Ident, collective: Ident): F[Int]
def deleteItemMultiple(items: NonEmptyList[Ident], collective: Ident): F[Int]
def deleteItemMultiple(items: Nel[Ident], collective: Ident): F[Int]
def deleteAttachment(id: Ident, collective: Ident): F[Int]
def setDeletedState(items: NonEmptyList[Ident], collective: Ident): F[Int]
def setDeletedState(items: Nel[Ident], collective: Ident): F[Int]
def deleteAttachmentMultiple(
attachments: NonEmptyList[Ident],
attachments: Nel[Ident],
collective: Ident
): F[Int]
@ -174,7 +188,7 @@ trait OItem[F[_]] {
): F[UpdateResult]
def reprocessAll(
items: NonEmptyList[Ident],
items: Nel[Ident],
account: AccountId,
notifyJoex: Boolean
): F[UpdateResult]
@ -204,13 +218,12 @@ trait OItem[F[_]] {
/** Merges a list of items into one item. The remaining items are deleted. */
def merge(
logger: Logger[F],
items: NonEmptyList[Ident],
items: Nel[Ident],
collective: Ident
): F[UpdateResult]
}
object OItem {
def apply[F[_]: Async](
store: Store[F],
fts: FtsClient[F],
@ -227,7 +240,7 @@ object OItem {
def merge(
logger: Logger[F],
items: NonEmptyList[Ident],
items: Nel[Ident],
collective: Ident
): F[UpdateResult] =
Merge(logger, store, this, createIndex).merge(items, collective).attempt.map {
@ -250,52 +263,62 @@ object OItem {
item: Ident,
tags: List[String],
collective: Ident
): F[UpdateResult] =
linkTagsMultipleItems(NonEmptyList.of(item), tags, collective)
): F[AttachedEvent[UpdateResult]] =
linkTagsMultipleItems(Nel.of(item), tags, collective)
def linkTagsMultipleItems(
items: NonEmptyList[Ident],
items: Nel[Ident],
tags: List[String],
collective: Ident
): F[UpdateResult] =
): F[AttachedEvent[UpdateResult]] =
tags.distinct match {
case Nil => UpdateResult.success.pure[F]
case Nil => AttachedEvent.only(UpdateResult.success).pure[F]
case ws =>
store.transact {
(for {
itemIds <- OptionT
.liftF(RItem.filterItems(items, collective))
.filter(_.nonEmpty)
given <- OptionT.liftF(RTag.findAllByNameOrId(ws, collective))
_ <- OptionT.liftF(
itemIds.traverse(item =>
RTagItem.appendTags(item, given.map(_.tagId).toList)
store
.transact {
(for {
itemIds <- OptionT
.liftF(RItem.filterItems(items, collective))
.subflatMap(l => Nel.fromFoldable(l))
given <- OptionT.liftF(RTag.findAllByNameOrId(ws, collective))
added <- OptionT.liftF(
itemIds.traverse(item =>
RTagItem.appendTags(item, given.map(_.tagId).toList)
)
)
)
} yield UpdateResult.success).getOrElse(UpdateResult.notFound)
}
ev = Event.TagsChanged.partial(
itemIds,
added.toList.flatten.map(_.id).toList,
Nil
)
} yield AttachedEvent(UpdateResult.success)(ev))
.getOrElse(AttachedEvent.only(UpdateResult.notFound))
}
}
def removeTagsMultipleItems(
items: NonEmptyList[Ident],
items: Nel[Ident],
tags: List[String],
collective: Ident
): F[UpdateResult] =
): F[AttachedEvent[UpdateResult]] =
tags.distinct match {
case Nil => UpdateResult.success.pure[F]
case Nil => AttachedEvent.only(UpdateResult.success).pure[F]
case ws =>
store.transact {
(for {
itemIds <- OptionT
.liftF(RItem.filterItems(items, collective))
.filter(_.nonEmpty)
.subflatMap(l => Nel.fromFoldable(l))
given <- OptionT.liftF(RTag.findAllByNameOrId(ws, collective))
_ <- OptionT.liftF(
itemIds.traverse(item =>
RTagItem.removeAllTags(item, given.map(_.tagId).toList)
)
)
} yield UpdateResult.success).getOrElse(UpdateResult.notFound)
mkEvent = Event.TagsChanged
.partial(itemIds, Nil, given.map(_.tagId.id).toList)
} yield AttachedEvent(UpdateResult.success)(mkEvent))
.getOrElse(AttachedEvent.only(UpdateResult.notFound))
}
}
@ -303,9 +326,9 @@ object OItem {
item: Ident,
tags: List[String],
collective: Ident
): F[UpdateResult] =
): F[AttachedEvent[UpdateResult]] =
tags.distinct match {
case Nil => UpdateResult.success.pure[F]
case Nil => AttachedEvent.only(UpdateResult.success).pure[F]
case kws =>
val db =
(for {
@ -316,7 +339,14 @@ object OItem {
toadd = given.map(_.tagId).diff(exist.map(_.tagId))
_ <- OptionT.liftF(RTagItem.setAllTags(item, toadd))
_ <- OptionT.liftF(RTagItem.removeAllTags(item, remove.toSeq))
} yield UpdateResult.success).getOrElse(UpdateResult.notFound)
mkEvent = Event.TagsChanged.partial(
Nel.of(item),
toadd.map(_.id).toList,
remove.map(_.id).toList
)
} yield AttachedEvent(UpdateResult.success)(mkEvent))
.getOrElse(AttachedEvent.only(UpdateResult.notFound))
store.transact(db)
}
@ -325,41 +355,69 @@ object OItem {
item: Ident,
tagIds: List[String],
collective: Ident
): F[UpdateResult] =
setTagsMultipleItems(NonEmptyList.of(item), tagIds, collective)
): F[AttachedEvent[UpdateResult]] =
setTagsMultipleItems(Nel.of(item), tagIds, collective)
def setTagsMultipleItems(
items: NonEmptyList[Ident],
items: Nel[Ident],
tags: List[String],
collective: Ident
): F[UpdateResult] =
UpdateResult.fromUpdate(store.transact(for {
k <- RTagItem.deleteItemTags(items, collective)
rtags <- RTag.findAllByNameOrId(tags, collective)
res <- items.traverse(i => RTagItem.setAllTags(i, rtags.map(_.tagId)))
n = res.fold
} yield k + n))
): F[AttachedEvent[UpdateResult]] = {
val dbTask =
for {
k <- RTagItem.deleteItemTags(items, collective)
given <- RTag.findAllByNameOrId(tags, collective)
res <- items.traverse(i => RTagItem.setAllTags(i, given.map(_.tagId)))
n = res.fold
mkEvent = Event.TagsChanged.partial(
items,
given.map(_.tagId.id).toList,
Nil
)
} yield AttachedEvent(k + n)(mkEvent)
def addNewTag(item: Ident, tag: RTag): F[AddResult] =
for {
data <- store.transact(dbTask)
} yield data.map(UpdateResult.fromUpdateRows)
}
def addNewTag(
collective: Ident,
item: Ident,
tag: RTag
): F[AttachedEvent[AddResult]] =
(for {
_ <- OptionT(store.transact(RItem.getCollective(item)))
.filter(_ == tag.collective)
addres <- OptionT.liftF(otag.add(tag))
_ <- addres match {
res <- addres match {
case AddResult.Success =>
OptionT.liftF(
store.transact(RTagItem.setAllTags(item, List(tag.tagId)))
store
.transact(RTagItem.setAllTags(item, List(tag.tagId)))
.map { _ =>
AttachedEvent(())(
Event.TagsChanged.partial(
Nel.of(item),
List(tag.tagId.id),
Nil
)
)
}
)
case AddResult.EntityExists(_) =>
OptionT.pure[F](0)
OptionT.pure[F](AttachedEvent.only(()))
case AddResult.Failure(_) =>
OptionT.pure[F](0)
OptionT.pure[F](AttachedEvent.only(()))
}
} yield addres)
.getOrElse(AddResult.Failure(new Exception("Collective mismatch")))
} yield res.map(_ => addres))
.getOrElse(
AttachedEvent.only(AddResult.Failure(new Exception("Collective mismatch")))
)
def setDirection(
items: NonEmptyList[Ident],
items: Nel[Ident],
direction: Direction,
collective: Ident
): F[UpdateResult] =
@ -383,7 +441,7 @@ object OItem {
)
def setFolderMultiple(
items: NonEmptyList[Ident],
items: Nel[Ident],
folder: Option[Ident],
collective: Ident
): F[UpdateResult] =
@ -404,7 +462,7 @@ object OItem {
} yield res
def setCorrOrg(
items: NonEmptyList[Ident],
items: Nel[Ident],
org: Option[Ident],
collective: Ident
): F[UpdateResult] =
@ -423,7 +481,7 @@ object OItem {
OptionT.liftF(
store.transact(
RItem.updateCorrOrg(
NonEmptyList.of(item),
Nel.of(item),
org.org.cid,
Some(org.org.oid)
)
@ -438,7 +496,7 @@ object OItem {
.getOrElse(AddResult.Failure(new Exception("Collective mismatch")))
def setCorrPerson(
items: NonEmptyList[Ident],
items: Nel[Ident],
person: Option[Ident],
collective: Ident
): F[UpdateResult] =
@ -461,7 +519,7 @@ object OItem {
store.transact(
RItem
.updateCorrPerson(
NonEmptyList.of(item),
Nel.of(item),
person.person.cid,
Some(person.person.pid)
)
@ -476,7 +534,7 @@ object OItem {
.getOrElse(AddResult.Failure(new Exception("Collective mismatch")))
def setConcPerson(
items: NonEmptyList[Ident],
items: Nel[Ident],
person: Option[Ident],
collective: Ident
): F[UpdateResult] =
@ -499,7 +557,7 @@ object OItem {
store.transact(
RItem
.updateConcPerson(
NonEmptyList.of(item),
Nel.of(item),
person.person.cid,
Some(person.person.pid)
)
@ -514,7 +572,7 @@ object OItem {
.getOrElse(AddResult.Failure(new Exception("Collective mismatch")))
def setConcEquip(
items: NonEmptyList[Ident],
items: Nel[Ident],
equip: Option[Ident],
collective: Ident
): F[UpdateResult] =
@ -533,7 +591,7 @@ object OItem {
OptionT.liftF(
store.transact(
RItem
.updateConcEquip(NonEmptyList.of(item), equip.cid, Some(equip.eid))
.updateConcEquip(Nel.of(item), equip.cid, Some(equip.eid))
)
)
case AddResult.EntityExists(_) =>
@ -569,7 +627,7 @@ object OItem {
)
def setNameMultiple(
items: NonEmptyList[Ident],
items: Nel[Ident],
name: String,
collective: Ident
): F[UpdateResult] =
@ -590,7 +648,7 @@ object OItem {
} yield res
def setStates(
items: NonEmptyList[Ident],
items: Nel[Ident],
state: ItemState,
collective: Ident
): F[AddResult] =
@ -600,7 +658,7 @@ object OItem {
.map(AddResult.fromUpdate)
def restore(
items: NonEmptyList[Ident],
items: Nel[Ident],
collective: Ident
): F[UpdateResult] =
UpdateResult.fromUpdate(for {
@ -612,7 +670,7 @@ object OItem {
} yield n)
def setItemDate(
items: NonEmptyList[Ident],
items: Nel[Ident],
date: Option[Timestamp],
collective: Ident
): F[UpdateResult] =
@ -622,7 +680,7 @@ object OItem {
)
def setItemDueDate(
items: NonEmptyList[Ident],
items: Nel[Ident],
date: Option[Timestamp],
collective: Ident
): F[UpdateResult] =
@ -636,14 +694,14 @@ object OItem {
.delete(store)(itemId, collective)
.flatTap(_ => fts.removeItem(logger, itemId))
def deleteItemMultiple(items: NonEmptyList[Ident], collective: Ident): F[Int] =
def deleteItemMultiple(items: Nel[Ident], collective: Ident): F[Int] =
for {
itemIds <- store.transact(RItem.filterItems(items, collective))
results <- itemIds.traverse(item => deleteItem(item, collective))
n = results.sum
} yield n
def setDeletedState(items: NonEmptyList[Ident], collective: Ident): F[Int] =
def setDeletedState(items: Nel[Ident], collective: Ident): F[Int] =
for {
n <- store.transact(RItem.setState(items, collective, ItemState.Deleted))
_ <- items.traverse(id => fts.removeItem(logger, id))
@ -658,7 +716,7 @@ object OItem {
.flatTap(_ => fts.removeAttachment(logger, id))
def deleteAttachmentMultiple(
attachments: NonEmptyList[Ident],
attachments: Nel[Ident],
collective: Ident
): F[Int] =
for {
@ -710,7 +768,7 @@ object OItem {
} yield UpdateResult.success).getOrElse(UpdateResult.notFound)
def reprocessAll(
items: NonEmptyList[Ident],
items: Nel[Ident],
account: AccountId,
notifyJoex: Boolean
): F[UpdateResult] =


@ -0,0 +1,347 @@
/*
* Copyright 2020 Eike K. & Contributors
*
* SPDX-License-Identifier: AGPL-3.0-or-later
*/
package docspell.backend.ops
import java.io.PrintWriter
import java.io.StringWriter
import cats.data.OptionT
import cats.data.{NonEmptyList => Nel}
import cats.effect._
import cats.implicits._
import docspell.backend.ops.ONotification.Hook
import docspell.common._
import docspell.jsonminiq.JsonMiniQuery
import docspell.notification.api._
import docspell.store.AddResult
import docspell.store.Store
import docspell.store.UpdateResult
import docspell.store.queries.QNotification
import docspell.store.records._
trait ONotification[F[_]] {
def sendMessage(
logger: Logger[F],
data: EventContext,
channels: Seq[NotificationChannel]
): F[Unit]
def offerEvents(ev: Iterable[Event]): F[Unit]
def mkNotificationChannel(channel: Channel): F[Vector[NotificationChannel]]
def findNotificationChannel(ref: ChannelRef): F[Vector[NotificationChannel]]
def listChannels(account: AccountId): F[Vector[Channel]]
def deleteChannel(id: Ident, account: AccountId): F[UpdateResult]
def createChannel(channel: Channel, account: AccountId): F[AddResult]
def updateChannel(channel: Channel, account: AccountId): F[UpdateResult]
def listHooks(account: AccountId): F[Vector[Hook]]
def deleteHook(id: Ident, account: AccountId): F[UpdateResult]
def createHook(hook: Hook, account: AccountId): F[AddResult]
def updateHook(hook: Hook, account: AccountId): F[UpdateResult]
def sampleEvent(
evt: EventType,
account: AccountId,
baseUrl: Option[LenientUri]
): F[EventContext]
def sendSampleEvent(
evt: EventType,
channel: Channel,
account: AccountId,
baseUrl: Option[LenientUri]
): F[ONotification.SendTestResult]
}
object ONotification {
private[this] val logger = org.log4s.getLogger
def apply[F[_]: Async](
store: Store[F],
notMod: NotificationModule[F]
): Resource[F, ONotification[F]] =
Resource.pure[F, ONotification[F]](new ONotification[F] {
val log = Logger.log4s[F](logger)
def withUserId[A](
account: AccountId
)(f: Ident => F[UpdateResult]): F[UpdateResult] =
OptionT(store.transact(RUser.findIdByAccount(account)))
.semiflatMap(f)
.getOrElse(UpdateResult.notFound)
def offerEvents(ev: Iterable[Event]): F[Unit] =
ev.toList.traverse(notMod.offer(_)).as(())
def sendMessage(
logger: Logger[F],
data: EventContext,
channels: Seq[NotificationChannel]
): F[Unit] =
notMod.send(logger, data, channels)
def sampleEvent(
evt: EventType,
account: AccountId,
baseUrl: Option[LenientUri]
): F[EventContext] =
Event
.sample[F](evt, account, baseUrl)
.flatMap(notMod.sampleEvent.run)
def sendSampleEvent(
evt: EventType,
channel: Channel,
account: AccountId,
baseUrl: Option[LenientUri]
): F[SendTestResult] =
(for {
ev <- sampleEvent(evt, account, baseUrl)
logbuf <- Logger.buffer()
ch <- mkNotificationChannel(channel)
_ <- notMod.send(logbuf._2.andThen(log), ev, ch)
logs <- logbuf._1.get
res = SendTestResult(true, logs)
} yield res).attempt
.map {
case Right(res) => res
case Left(ex) =>
val ps = new StringWriter()
ex.printStackTrace(new PrintWriter(ps))
SendTestResult(false, Vector(s"${ex.getMessage}\n$ps"))
}
def listChannels(account: AccountId): F[Vector[Channel]] =
store
.transact(RNotificationChannel.getByAccount(account))
.map(_.map(ChannelConv.makeChannel))
def deleteChannel(id: Ident, account: AccountId): F[UpdateResult] =
UpdateResult
.fromUpdate(
store.transact(RNotificationChannel.deleteByAccount(id, account))
)
.flatTap(_ => log.info(s"Deleted channel ${id.id} for ${account.asString}"))
def createChannel(channel: Channel, account: AccountId): F[AddResult] =
(for {
newId <- OptionT.liftF(Ident.randomId[F])
userId <- OptionT(store.transact(RUser.findIdByAccount(account)))
r <- ChannelConv.makeRecord[F](store, Right(channel), newId, userId)
_ <- OptionT.liftF(store.transact(RNotificationChannel.insert(r)))
_ <- OptionT.liftF(log.debug(s"Created channel $r for $account"))
} yield AddResult.Success)
.getOrElse(AddResult.failure(new Exception("User not found!")))
def updateChannel(channel: Channel, account: AccountId): F[UpdateResult] =
(for {
userId <- OptionT(store.transact(RUser.findIdByAccount(account)))
r <- ChannelConv.makeRecord[F](store, Right(channel), channel.id, userId)
n <- OptionT.liftF(store.transact(RNotificationChannel.update(r)))
} yield UpdateResult.fromUpdateRows(n)).getOrElse(UpdateResult.notFound)
def listHooks(account: AccountId): F[Vector[Hook]] =
store.transact(for {
list <- RNotificationHook.findAllByAccount(account)
res <- list.traverse((Hook.fromRecord _).tupled)
} yield res)
def deleteHook(id: Ident, account: AccountId): F[UpdateResult] =
UpdateResult
.fromUpdate(store.transact(RNotificationHook.deleteByAccount(id, account)))
def createHook(hook: Hook, account: AccountId): F[AddResult] =
(for {
_ <- OptionT.liftF(log.debug(s"Creating new notification hook: $hook"))
channelId <- OptionT.liftF(Ident.randomId[F])
userId <- OptionT(store.transact(RUser.findIdByAccount(account)))
r <- ChannelConv.makeRecord[F](store, hook.channel, channelId, userId)
_ <- OptionT.liftF(
if (channelId == r.id) store.transact(RNotificationChannel.insert(r))
else ().pure[F]
)
_ <- OptionT.liftF(log.debug(s"Created channel $r for $account"))
hr <- OptionT.liftF(Hook.makeRecord(r, userId, hook))
_ <- OptionT.liftF(store.transact(RNotificationHook.insert(hr)))
_ <- OptionT.liftF(
store.transact(RNotificationHookEvent.insertAll(hr.id, hook.events))
)
} yield AddResult.Success)
.getOrElse(AddResult.failure(new Exception("User or channel not found!")))
def updateHook(hook: Hook, account: AccountId): F[UpdateResult] = {
def withHook(f: RNotificationHook => F[UpdateResult]): F[UpdateResult] =
withUserId(account)(userId =>
OptionT(store.transact(RNotificationHook.getById(hook.id, userId)))
.semiflatMap(f)
.getOrElse(UpdateResult.notFound)
)
def withChannel(
r: RNotificationHook
)(f: RNotificationChannel => F[UpdateResult]): F[UpdateResult] =
ChannelConv
.makeRecord(store, hook.channel, r.channelId, r.uid)
.semiflatMap(f)
.getOrElse(UpdateResult.notFound)
def doUpdate(r: RNotificationHook): F[UpdateResult] =
withChannel(r) { ch =>
UpdateResult.fromUpdate(store.transact(for {
nc <- RNotificationChannel.update(ch)
ne <- RNotificationHookEvent.updateAll(
r.id,
if (hook.allEvents) Nil else hook.events
)
nr <- RNotificationHook.update(
r.copy(
enabled = hook.enabled,
allEvents = hook.allEvents,
eventFilter = hook.eventFilter
)
)
} yield nc + ne + nr))
}
withHook(doUpdate)
}
def mkNotificationChannel(channel: Channel): F[Vector[NotificationChannel]] =
(for {
rec <- ChannelConv
.makeRecord(store, Right(channel), channel.id, Ident.unsafe(""))
ch <- OptionT.liftF(store.transact(QNotification.readChannel(rec)))
} yield ch).getOrElse(Vector.empty)
def findNotificationChannel(ref: ChannelRef): F[Vector[NotificationChannel]] =
(for {
rec <- OptionT(store.transact(RNotificationChannel.getByRef(ref)))
ch <- OptionT.liftF(store.transact(QNotification.readChannel(rec)))
} yield ch).getOrElse(Vector.empty)
})
object ChannelConv {
private[ops] def makeChannel(r: RNotificationChannel): Channel =
r.fold(
mail =>
Channel.Mail(mail.id, mail.connection, Nel.fromListUnsafe(mail.recipients)),
gotify => Channel.Gotify(r.id, gotify.url, gotify.appKey),
matrix =>
Channel.Matrix(r.id, matrix.homeServer, matrix.roomId, matrix.accessToken),
http => Channel.Http(r.id, http.url)
)
private[ops] def makeRecord[F[_]: Sync](
store: Store[F],
channelIn: Either[ChannelRef, Channel],
id: Ident,
userId: Ident
): OptionT[F, RNotificationChannel] =
channelIn match {
case Left(ref) =>
OptionT(store.transact(RNotificationChannel.getByRef(ref)))
case Right(channel) =>
for {
time <- OptionT.liftF(Timestamp.current[F])
r <-
channel match {
case Channel.Mail(_, conn, recipients) =>
for {
mailConn <- OptionT(
store.transact(RUserEmail.getByUser(userId, conn))
)
rec = RNotificationChannelMail(
id,
userId,
mailConn.id,
recipients.toList,
time
).vary
} yield rec
case Channel.Gotify(_, url, appKey) =>
OptionT.pure[F](
RNotificationChannelGotify(id, userId, url, appKey, time).vary
)
case Channel.Matrix(_, homeServer, roomId, accessToken) =>
OptionT.pure[F](
RNotificationChannelMatrix(
id,
userId,
homeServer,
roomId,
accessToken,
"m.text",
time
).vary
)
case Channel.Http(_, url) =>
OptionT.pure[F](RNotificationChannelHttp(id, userId, url, time).vary)
}
} yield r
}
}
final case class Hook(
id: Ident,
enabled: Boolean,
channel: Either[ChannelRef, Channel],
allEvents: Boolean,
eventFilter: Option[JsonMiniQuery],
events: List[EventType]
)
object Hook {
import doobie._
private[ops] def fromRecord(
r: RNotificationHook,
events: List[EventType]
): ConnectionIO[Hook] =
RNotificationChannel
.getByHook(r)
.map(_.head)
.map(ChannelConv.makeChannel)
.map(ch => Hook(r.id, r.enabled, Right(ch), r.allEvents, r.eventFilter, events))
private[ops] def makeRecord[F[_]: Sync](
ch: RNotificationChannel,
userId: Ident,
hook: Hook
): F[RNotificationHook] =
for {
id <- Ident.randomId[F]
time <- Timestamp.current[F]
h = RNotificationHook(
id,
userId,
hook.enabled,
ch.fold(_.id.some, _ => None, _ => None, _ => None),
ch.fold(_ => None, _.id.some, _ => None, _ => None),
ch.fold(_ => None, _ => None, _.id.some, _ => None),
ch.fold(_ => None, _ => None, _ => None, _.id.some),
hook.allEvents,
hook.eventFilter,
time
)
} yield h
}
final case class SendTestResult(success: Boolean, logMessages: Vector[String])
}


@ -11,7 +11,10 @@ import cats.effect._
import cats.implicits._
import fs2.Stream
import docspell.backend.MailAddressCodec._
import docspell.common._
import docspell.notification.api.PeriodicDueItemsArgs
import docspell.notification.api.PeriodicQueryArgs
import docspell.store.queue.JobQueue
import docspell.store.usertask._
@ -19,6 +22,22 @@ import io.circe.Encoder
trait OUserTask[F[_]] {
/** Return the settings for all periodic-query tasks of the given user */
def getPeriodicQuery(scope: UserTaskScope): Stream[F, UserTask[PeriodicQueryArgs]]
/** Find a periodic-query task by the given id. */
def findPeriodicQuery(
id: Ident,
scope: UserTaskScope
): OptionT[F, UserTask[PeriodicQueryArgs]]
/** Updates the periodic-query task of the given user. */
def submitPeriodicQuery(
scope: UserTaskScope,
subject: Option[String],
task: UserTask[PeriodicQueryArgs]
): F[Unit]
/** Return the settings for all scan-mailbox tasks of the current user. */
def getScanMailbox(scope: UserTaskScope): Stream[F, UserTask[ScanMailboxArgs]]
@ -36,19 +55,19 @@ trait OUserTask[F[_]] {
): F[Unit]
/** Return the settings for all notify-due-items tasks of the current user. */
def getNotifyDueItems(scope: UserTaskScope): Stream[F, UserTask[NotifyDueItemsArgs]]
def getNotifyDueItems(scope: UserTaskScope): Stream[F, UserTask[PeriodicDueItemsArgs]]
/** Find a notify-due-items task by the given id. */
def findNotifyDueItems(
id: Ident,
scope: UserTaskScope
): OptionT[F, UserTask[NotifyDueItemsArgs]]
): OptionT[F, UserTask[PeriodicDueItemsArgs]]
/** Updates the notify-due-items tasks and notifies the joex nodes. */
def submitNotifyDueItems(
scope: UserTaskScope,
subject: Option[String],
task: UserTask[NotifyDueItemsArgs]
task: UserTask[PeriodicDueItemsArgs]
): F[Unit]
/** Removes a user task with the given id. */
@ -109,23 +128,42 @@ object OUserTask {
def getNotifyDueItems(
scope: UserTaskScope
): Stream[F, UserTask[NotifyDueItemsArgs]] =
): Stream[F, UserTask[PeriodicDueItemsArgs]] =
store
.getByName[NotifyDueItemsArgs](scope, NotifyDueItemsArgs.taskName)
.getByName[PeriodicDueItemsArgs](scope, PeriodicDueItemsArgs.taskName)
def findNotifyDueItems(
id: Ident,
scope: UserTaskScope
): OptionT[F, UserTask[NotifyDueItemsArgs]] =
): OptionT[F, UserTask[PeriodicDueItemsArgs]] =
OptionT(getNotifyDueItems(scope).find(_.id == id).compile.last)
def submitNotifyDueItems(
scope: UserTaskScope,
subject: Option[String],
task: UserTask[NotifyDueItemsArgs]
task: UserTask[PeriodicDueItemsArgs]
): F[Unit] =
for {
_ <- store.updateTask[NotifyDueItemsArgs](scope, subject, task)
_ <- store.updateTask[PeriodicDueItemsArgs](scope, subject, task)
_ <- joex.notifyAllNodes
} yield ()
def getPeriodicQuery(scope: UserTaskScope): Stream[F, UserTask[PeriodicQueryArgs]] =
store.getByName[PeriodicQueryArgs](scope, PeriodicQueryArgs.taskName)
def findPeriodicQuery(
id: Ident,
scope: UserTaskScope
): OptionT[F, UserTask[PeriodicQueryArgs]] =
OptionT(getPeriodicQuery(scope).find(_.id == id).compile.last)
def submitPeriodicQuery(
scope: UserTaskScope,
subject: Option[String],
task: UserTask[PeriodicQueryArgs]
): F[Unit] =
for {
_ <- store.updateTask[PeriodicQueryArgs](scope, subject, task)
_ <- joex.notifyAllNodes
} yield ()
})


@ -6,6 +6,8 @@
package docspell.common
import io.circe.{Decoder, Encoder}
final case class ItemQueryString(query: String) {
def isEmpty: Boolean =
query.isEmpty
@ -15,4 +17,9 @@ object ItemQueryString {
def apply(qs: Option[String]): ItemQueryString =
ItemQueryString(qs.getOrElse(""))
implicit val jsonEncoder: Encoder[ItemQueryString] =
Encoder.encodeString.contramap(_.query)
implicit val jsonDecoder: Decoder[ItemQueryString] =
Decoder.decodeString.map(ItemQueryString.apply)
}
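A REPL-style sketch of the codec (the query string is a made-up example), presumably added so the new periodic-query task arguments can carry the query as JSON:

import io.circe.syntax._

import docspell.common.ItemQueryString

val q = ItemQueryString("tag:invoice")
q.asJson.noSpaces            // the raw query as a JSON string
q.asJson.as[ItemQueryString] // Right(ItemQueryString("tag:invoice"))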


@ -103,6 +103,8 @@ case class LenientUri(
val fragPart = fragment.map(f => s"#$f").getOrElse("")
s"$schemePart:$authPart$pathPart$queryPart$fragPart"
}
override def toString(): String =
asString
}
object LenientUri {


@ -6,8 +6,11 @@
package docspell.common
import java.io.{PrintWriter, StringWriter}
import cats.Applicative
import cats.effect.Sync
import cats.effect.{Ref, Sync}
import cats.implicits._
import fs2.Stream
import docspell.common.syntax.all._
@ -42,6 +45,28 @@ trait Logger[F[_]] { self =>
def error(ex: Throwable)(msg: => String): Stream[F, Unit] =
Stream.eval(self.error(ex)(msg))
}
def andThen(other: Logger[F])(implicit F: Sync[F]): Logger[F] = {
val self = this
new Logger[F] {
def trace(msg: => String) =
self.trace(msg) >> other.trace(msg)
override def debug(msg: => String) =
self.debug(msg) >> other.debug(msg)
override def info(msg: => String) =
self.info(msg) >> other.info(msg)
override def warn(msg: => String) =
self.warn(msg) >> other.warn(msg)
override def error(ex: Throwable)(msg: => String) =
self.error(ex)(msg) >> other.error(ex)(msg)
override def error(msg: => String) =
self.error(msg) >> other.error(msg)
}
}
}
object Logger {
@ -88,4 +113,31 @@ object Logger {
log.ferror(msg)
}
def buffer[F[_]: Sync](): F[(Ref[F, Vector[String]], Logger[F])] =
for {
buffer <- Ref.of[F, Vector[String]](Vector.empty[String])
logger = new Logger[F] {
def trace(msg: => String) =
buffer.update(_.appended(s"TRACE $msg"))
def debug(msg: => String) =
buffer.update(_.appended(s"DEBUG $msg"))
def info(msg: => String) =
buffer.update(_.appended(s"INFO $msg"))
def warn(msg: => String) =
buffer.update(_.appended(s"WARN $msg"))
def error(ex: Throwable)(msg: => String) = {
val ps = new StringWriter()
ex.printStackTrace(new PrintWriter(ps))
buffer.update(_.appended(s"ERROR $msg:\n$ps"))
}
def error(msg: => String) =
buffer.update(_.appended(s"ERROR $msg"))
}
} yield (buffer, logger)
}
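The two additions work together: buffer() collects messages in memory, and andThen forwards each message to a second logger as well. This is the pattern ONotification.sendSampleEvent uses to capture the log of a test delivery. A small sketch:

import cats.effect.IO

import docspell.common.Logger

// Sketch: log to an in-memory buffer and to a main logger at the same time.
def captured(main: Logger[IO]): IO[Vector[String]] =
  for {
    buf <- Logger.buffer[IO]()
    log = buf._2.andThen(main)
    _ <- log.info("sending test notification") // hypothetical message
    msgs <- buf._1.get // Vector("INFO sending test notification")
  } yield msgs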


@ -27,7 +27,7 @@ case class NotifyDueItemsArgs(
daysBack: Option[Int],
tagsInclude: List[Ident],
tagsExclude: List[Ident]
) {}
)
object NotifyDueItemsArgs {


@ -11,6 +11,7 @@ import cats.implicits._
import fs2.concurrent.SignallingRef
import docspell.analysis.TextAnalyser
import docspell.backend.MailAddressCodec
import docspell.backend.fulltext.CreateIndex
import docspell.backend.msg.{CancelJob, JobQueuePublish, Topics}
import docspell.backend.ops._
@ -32,6 +33,8 @@ import docspell.joex.process.ReProcessItem
import docspell.joex.scanmailbox._
import docspell.joex.scheduler._
import docspell.joex.updatecheck._
import docspell.notification.api.NotificationModule
import docspell.notification.impl.NotificationModuleImpl
import docspell.pubsub.api.{PubSub, PubSubT}
import docspell.store.Store
import docspell.store.queue._
@ -49,16 +52,19 @@ final class JoexAppImpl[F[_]: Async](
pubSubT: PubSubT[F],
pstore: PeriodicTaskStore[F],
termSignal: SignallingRef[F, Boolean],
notificationMod: NotificationModule[F],
val scheduler: Scheduler[F],
val periodicScheduler: PeriodicScheduler[F]
) extends JoexApp[F] {
def init: F[Unit] = {
val run = scheduler.start.compile.drain
val prun = periodicScheduler.start.compile.drain
val eventConsume = notificationMod.consumeAllEvents(2).compile.drain
for {
_ <- scheduleBackgroundTasks
_ <- Async[F].start(run)
_ <- Async[F].start(prun)
_ <- Async[F].start(eventConsume)
_ <- scheduler.periodicAwake
_ <- periodicScheduler.periodicAwake
_ <- subscriptions
@ -115,7 +121,7 @@ final class JoexAppImpl[F[_]: Async](
}
object JoexAppImpl {
object JoexAppImpl extends MailAddressCodec {
def create[F[_]: Async](
cfg: Config,
@ -130,7 +136,12 @@ object JoexAppImpl {
pubSub,
Logger.log4s(org.log4s.getLogger(s"joex-${cfg.appId.id}"))
)
queue <- JobQueuePublish(store, pubSubT)
javaEmil =
JavaMailEmil(Settings.defaultSettings.copy(debug = cfg.mailDebug))
notificationMod <- Resource.eval(
NotificationModuleImpl[F](store, javaEmil, httpClient, 200)
)
queue <- JobQueuePublish(store, pubSubT, notificationMod)
joex <- OJoex(pubSubT)
upload <- OUpload(store, queue, joex)
fts <- createFtsClient(cfg)(httpClient)
@ -140,11 +151,11 @@ object JoexAppImpl {
analyser <- TextAnalyser.create[F](cfg.textAnalysis.textAnalysisConfig)
regexNer <- RegexNerFile(cfg.textAnalysis.regexNerFileConfig, store)
updateCheck <- UpdateCheck.resource(httpClient)
javaEmil =
JavaMailEmil(Settings.defaultSettings.copy(debug = cfg.mailDebug))
notification <- ONotification(store, notificationMod)
sch <- SchedulerBuilder(cfg.scheduler, store)
.withQueue(queue)
.withPubSub(pubSubT)
.withEventSink(notificationMod)
.withTask(
JobTask.json(
ProcessItemArgs.taskName,
@ -263,6 +274,20 @@ object JoexAppImpl {
UpdateCheckTask.onCancel[F]
)
)
.withTask(
JobTask.json(
PeriodicQueryTask.taskName,
PeriodicQueryTask[F](notification),
PeriodicQueryTask.onCancel[F]
)
)
.withTask(
JobTask.json(
PeriodicDueItemsTask.taskName,
PeriodicDueItemsTask[F](notification),
PeriodicDueItemsTask.onCancel[F]
)
)
.resource
psch <- PeriodicScheduler.create(
cfg.periodicScheduler,
@ -271,7 +296,17 @@ object JoexAppImpl {
pstore,
joex
)
app = new JoexAppImpl(cfg, store, queue, pubSubT, pstore, termSignal, sch, psch)
app = new JoexAppImpl(
cfg,
store,
queue,
pubSubT,
pstore,
termSignal,
notificationMod,
sch,
psch
)
appR <- Resource.make(app.init.map(_ => app))(_.initShutdown)
} yield appR


@ -0,0 +1,106 @@
/*
* Copyright 2020 Eike K. & Contributors
*
* SPDX-License-Identifier: AGPL-3.0-or-later
*/
package docspell.joex.notify
import cats.data.NonEmptyList
import cats.effect._
import cats.implicits._
import docspell.backend.ops.ONotification
import docspell.common._
import docspell.joex.scheduler.Context
import docspell.joex.scheduler.Task
import docspell.notification.api.EventContext
import docspell.notification.api.NotificationChannel
import docspell.notification.api.PeriodicDueItemsArgs
import docspell.query.Date
import docspell.query.ItemQuery._
import docspell.query.ItemQueryDsl._
import docspell.store.qb.Batch
import docspell.store.queries.ListItem
import docspell.store.queries.{QItem, Query}
object PeriodicDueItemsTask {
val taskName = PeriodicDueItemsArgs.taskName
type Args = PeriodicDueItemsArgs
def onCancel[F[_]]: Task[F, Args, Unit] =
Task.log(_.warn(s"Cancelling ${taskName.id} task"))
def apply[F[_]: Sync](notificationOps: ONotification[F]): Task[F, Args, Unit] =
Task { ctx =>
val limit = 7
Timestamp.current[F].flatMap { now =>
withItems(ctx, limit, now) { items =>
withEventContext(ctx, items, limit, now) { eventCtx =>
withChannel(ctx, notificationOps) { channels =>
notificationOps.sendMessage(ctx.logger, eventCtx, channels)
}
}
}
}
}
def withChannel[F[_]: Sync](ctx: Context[F, Args], ops: ONotification[F])(
cont: Vector[NotificationChannel] => F[Unit]
): F[Unit] =
TaskOperations.withChannel(ctx.logger, ctx.args.channel, ops)(cont)
def withItems[F[_]: Sync](ctx: Context[F, Args], limit: Int, now: Timestamp)(
cont: Vector[ListItem] => F[Unit]
): F[Unit] = {
val rightDate = Date((now + Duration.days(ctx.args.remindDays.toLong)).toMillis)
val q =
Query
.all(ctx.args.account)
.withOrder(orderAsc = _.dueDate)
.withFix(_.copy(query = Expr.ValidItemStates.some))
.withCond(_ =>
Query.QueryExpr(
Attr.DueDate <= rightDate &&?
ctx.args.daysBack.map(back =>
Attr.DueDate >= Date((now - Duration.days(back.toLong)).toMillis)
) &&?
NonEmptyList
.fromList(ctx.args.tagsInclude)
.map(ids => Q.tagIdsEq(ids.map(_.id))) &&?
NonEmptyList
.fromList(ctx.args.tagsExclude)
.map(ids => Q.tagIdsIn(ids.map(_.id)).negate)
)
)
for {
res <-
ctx.store
.transact(
QItem
.findItems(q, now.toUtcDate, 0, Batch.limit(limit))
.take(limit.toLong)
)
.compile
.toVector
_ <- cont(res)
} yield ()
}
def withEventContext[F[_]](
ctx: Context[F, Args],
items: Vector[ListItem],
limit: Int,
now: Timestamp
)(cont: EventContext => F[Unit]): F[Unit] =
TaskOperations.withEventContext(
ctx.logger,
ctx.args.account,
ctx.args.baseUrl,
items,
limit,
now
)(cont)
}


@ -0,0 +1,84 @@
/*
* Copyright 2020 Eike K. & Contributors
*
* SPDX-License-Identifier: AGPL-3.0-or-later
*/
package docspell.joex.notify
import cats.effect._
import cats.implicits._
import docspell.backend.ops.ONotification
import docspell.common._
import docspell.joex.scheduler.Context
import docspell.joex.scheduler.Task
import docspell.notification.api.EventContext
import docspell.notification.api.NotificationChannel
import docspell.notification.api.PeriodicQueryArgs
import docspell.query.ItemQueryParser
import docspell.store.qb.Batch
import docspell.store.queries.ListItem
import docspell.store.queries.{QItem, Query}
object PeriodicQueryTask {
val taskName = PeriodicQueryArgs.taskName
type Args = PeriodicQueryArgs
def onCancel[F[_]]: Task[F, Args, Unit] =
Task.log(_.warn(s"Cancelling ${taskName.id} task"))
def apply[F[_]: Sync](notificationOps: ONotification[F]): Task[F, Args, Unit] =
Task { ctx =>
val limit = 7
Timestamp.current[F].flatMap { now =>
withItems(ctx, limit, now) { items =>
withEventContext(ctx, items, limit, now) { eventCtx =>
withChannel(ctx, notificationOps) { channels =>
notificationOps.sendMessage(ctx.logger, eventCtx, channels)
}
}
}
}
}
def withChannel[F[_]: Sync](ctx: Context[F, Args], ops: ONotification[F])(
cont: Vector[NotificationChannel] => F[Unit]
): F[Unit] =
TaskOperations.withChannel(ctx.logger, ctx.args.channel, ops)(cont)
def withItems[F[_]: Sync](ctx: Context[F, Args], limit: Int, now: Timestamp)(
cont: Vector[ListItem] => F[Unit]
): F[Unit] =
ItemQueryParser.parse(ctx.args.query.query) match {
case Right(q) =>
val query = Query(Query.Fix(ctx.args.account, Some(q.expr), None))
val items = ctx.store
.transact(QItem.findItems(query, now.toUtcDate, 0, Batch.limit(limit)))
.compile
.to(Vector)
items.flatMap(cont)
case Left(err) =>
ctx.logger.error(
s"Item query is invalid, stopping: ${ctx.args.query} - ${err.render}"
)
}
def withEventContext[F[_]](
ctx: Context[F, Args],
items: Vector[ListItem],
limit: Int,
now: Timestamp
)(cont: EventContext => F[Unit]): F[Unit] =
TaskOperations.withEventContext(
ctx.logger,
ctx.args.account,
ctx.args.baseUrl,
items,
limit,
now
)(cont)
}


@ -0,0 +1,64 @@
/*
* Copyright 2020 Eike K. & Contributors
*
* SPDX-License-Identifier: AGPL-3.0-or-later
*/
package docspell.joex.notify
import cats.data.NonEmptyList
import cats.effect._
import cats.implicits._
import docspell.backend.ops.ONotification
import docspell.common._
import docspell.notification.api.ChannelOrRef
import docspell.notification.api.Event
import docspell.notification.api.EventContext
import docspell.notification.api.NotificationChannel
import docspell.notification.impl.context.ItemSelectionCtx
import docspell.store.queries.ListItem
trait TaskOperations {
def withChannel[F[_]: Sync](
logger: Logger[F],
channel: ChannelOrRef,
ops: ONotification[F]
)(
cont: Vector[NotificationChannel] => F[Unit]
): F[Unit] = {
val channels = channel match {
case Right(ch) => ops.mkNotificationChannel(ch)
case Left(ref) => ops.findNotificationChannel(ref)
}
channels.flatMap { ch =>
if (ch.isEmpty)
logger.error(s"No channels found for the given data: ${channel}")
else cont(ch)
}
}
def withEventContext[F[_]](
logger: Logger[F],
account: AccountId,
baseUrl: Option[LenientUri],
items: Vector[ListItem],
limit: Int,
now: Timestamp
)(cont: EventContext => F[Unit]): F[Unit] =
NonEmptyList.fromFoldable(items) match {
case Some(nel) =>
val more = items.size >= limit
val eventCtx = ItemSelectionCtx(
Event.ItemSelection(account, nel.map(_.id), more, baseUrl),
ItemSelectionCtx.Data
.create(account, items, baseUrl, more, now)
)
cont(eventCtx)
case None =>
logger.info(s"The query selected no items. Notification aborted")
}
}
object TaskOperations extends TaskOperations


@ -11,6 +11,7 @@ import cats.effect.std.Semaphore
import cats.implicits._
import fs2.concurrent.SignallingRef
import docspell.notification.api.EventSink
import docspell.pubsub.api.PubSubT
import docspell.store.Store
import docspell.store.queue.JobQueue
@ -21,7 +22,8 @@ case class SchedulerBuilder[F[_]: Async](
store: Store[F],
queue: Resource[F, JobQueue[F]],
logSink: LogSink[F],
pubSub: PubSubT[F]
pubSub: PubSubT[F],
eventSink: EventSink[F]
) {
def withConfig(cfg: SchedulerConfig): SchedulerBuilder[F] =
@ -45,6 +47,9 @@ case class SchedulerBuilder[F[_]: Async](
def withPubSub(pubSubT: PubSubT[F]): SchedulerBuilder[F] =
copy(pubSub = pubSubT)
def withEventSink(sink: EventSink[F]): SchedulerBuilder[F] =
copy(eventSink = sink)
def serve: Resource[F, Scheduler[F]] =
resource.evalMap(sch => Async[F].start(sch.start.compile.drain).map(_ => sch))
@ -58,6 +63,7 @@ case class SchedulerBuilder[F[_]: Async](
config,
jq,
pubSub,
eventSink,
tasks,
store,
logSink,
@ -83,7 +89,8 @@ object SchedulerBuilder {
store,
JobQueue(store),
LogSink.db[F](store),
PubSubT.noop[F]
PubSubT.noop[F],
EventSink.silent[F]
)
}


@ -17,6 +17,8 @@ import docspell.backend.msg.JobDone
import docspell.common._
import docspell.common.syntax.all._
import docspell.joex.scheduler.SchedulerImpl._
import docspell.notification.api.Event
import docspell.notification.api.EventSink
import docspell.pubsub.api.PubSubT
import docspell.store.Store
import docspell.store.queries.QJob
@ -29,6 +31,7 @@ final class SchedulerImpl[F[_]: Async](
val config: SchedulerConfig,
queue: JobQueue[F],
pubSub: PubSubT[F],
eventSink: EventSink[F],
tasks: JobTaskRegistry[F],
store: Store[F],
logSink: LogSink[F],
@ -206,6 +209,17 @@ final class SchedulerImpl[F[_]: Async](
JobDone.topic,
JobDone(job.id, job.group, job.task, job.args, finalState)
)
_ <- eventSink.offer(
Event.JobDone(
job.id,
job.group,
job.task,
job.args,
job.state,
job.subject,
job.submitter
)
)
} yield ()
def onStart(job: RJob): F[Unit] =


@ -27,6 +27,8 @@ paths:
description: |
Returns the version and project name and other properties of the build.
responses:
422:
description: BadRequest
200:
description: Ok
content:
@ -41,6 +43,8 @@ paths:
description: |
Notifies the job executor to wake up and look for jobs in the queue.
responses:
422:
description: BadRequest
200:
description: Ok
content:
@ -55,6 +59,8 @@ paths:
description: |
Returns all jobs this executor is currently executing.
responses:
422:
description: BadRequest
200:
description: Ok
content:
@ -69,6 +75,8 @@ paths:
description: |
Gracefully stops the scheduler and also stops the process.
responses:
422:
description: BadRequest
200:
description: Ok
content:
@ -85,6 +93,8 @@ paths:
parameters:
- $ref: "#/components/parameters/id"
responses:
422:
description: BadRequest
200:
description: Ok
content:
@ -103,6 +113,8 @@ paths:
parameters:
- $ref: "#/components/parameters/id"
responses:
422:
description: BadRequest
200:
description: Ok
content:


@ -0,0 +1,59 @@
/*
* Copyright 2020 Eike K. & Contributors
*
* SPDX-License-Identifier: AGPL-3.0-or-later
*/
package docspell.jsonminiq
import cats.implicits._
/** The inverse to Parser */
private[jsonminiq] object Format {
def apply(q: JsonMiniQuery): Either[String, String] =
q match {
case JsonMiniQuery.Empty => Right("")
case JsonMiniQuery.Identity => Right("")
case JsonMiniQuery.Fields(fields) =>
Right(fields.toVector.mkString(","))
case JsonMiniQuery.Indexes(nums) =>
Right(nums.toVector.mkString("(", ",", ")"))
case JsonMiniQuery.Filter(values, mt) =>
formatValue(values.head).map(v => formatMatchType(mt) + v)
case JsonMiniQuery.Chain(self, next) =>
for {
s1 <- apply(self)
s2 <- apply(next)
res = next match {
case _: JsonMiniQuery.Fields =>
s1 + "." + s2
case _ =>
s1 + s2
}
} yield res
case JsonMiniQuery.Concat(inner) =>
inner.toVector.traverse(apply).map(_.mkString("[", " | ", "]"))
case JsonMiniQuery.Forall(inner) =>
inner.toVector.traverse(apply).map(_.mkString("[", " & ", "]"))
}
def formatValue(v: String): Either[String, String] =
if (v.forall(Parser.isValidSimpleValue)) Right(v)
else if (v.contains("\"") && v.contains("'"))
Left(s"Value cannot use both \" and ': $v")
else if (v.contains("'")) Right(s"\"$v\"")
else Right(s"'$v'")
def formatMatchType(matchType: JsonMiniQuery.MatchType): String =
matchType match {
case JsonMiniQuery.MatchType.All => "="
case JsonMiniQuery.MatchType.Any => ":"
case JsonMiniQuery.MatchType.None => "!"
}
}
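// Round-trip sketch (illustrative): a query obtained from Parser can be rendered back
// to an equivalent string via the public JsonMiniQuery.asString.
private[jsonminiq] object FormatRoundTripSketch {
  val query = JsonMiniQuery.unsafeParse("content.added,removed.name")
  val rendered: Either[String, String] = query.asString // Right("content.added,removed.name")
}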

View File

@ -0,0 +1,245 @@
/*
* Copyright 2020 Eike K. & Contributors
*
* SPDX-License-Identifier: AGPL-3.0-or-later
*/
package docspell.jsonminiq
import cats.Monoid
import cats.data.NonEmptyVector
import cats.implicits._
import io.circe.Decoder
import io.circe.Encoder
import io.circe.Json.Folder
import io.circe.{Json, JsonNumber, JsonObject}
/** Create a predicate for a Json value. */
sealed trait JsonMiniQuery { self =>
def apply(json: Json): Vector[Json]
def >>(next: JsonMiniQuery): JsonMiniQuery =
JsonMiniQuery.Chain(self, next)
def ++(other: JsonMiniQuery): JsonMiniQuery =
JsonMiniQuery.Concat(NonEmptyVector.of(self, other))
def thenAny(other: JsonMiniQuery, more: JsonMiniQuery*): JsonMiniQuery =
self >> JsonMiniQuery.or(other, more: _*)
def thenAll(other: JsonMiniQuery, more: JsonMiniQuery*): JsonMiniQuery =
self >> JsonMiniQuery.and(other, more: _*)
def at(field: String, fields: String*): JsonMiniQuery =
self >> JsonMiniQuery.Fields(NonEmptyVector(field, fields.toVector))
def at(index: Int, indexes: Int*): JsonMiniQuery =
self >> JsonMiniQuery.Indexes(NonEmptyVector(index, indexes.toVector))
def isAll(value: String, values: String*): JsonMiniQuery =
self >> JsonMiniQuery.Filter(
NonEmptyVector(value, values.toVector),
JsonMiniQuery.MatchType.All
)
def isAny(value: String, values: String*): JsonMiniQuery =
self >> JsonMiniQuery.Filter(
NonEmptyVector(value, values.toVector),
JsonMiniQuery.MatchType.Any
)
def is(value: String): JsonMiniQuery =
isAny(value)
def &&(other: JsonMiniQuery): JsonMiniQuery =
JsonMiniQuery.and(self, other)
def ||(other: JsonMiniQuery): JsonMiniQuery =
self ++ other
def matches(json: Json): Boolean =
apply(json).nonEmpty
def notMatches(json: Json): Boolean =
!matches(json)
/** Returns a string representation of this query that can be parsed back to this value.
* Formatting can fail because not everything is supported. The idea is that every
* value that was parsed can be formatted again.
*/
def asString: Either[String, String] =
Format(this)
def unsafeAsString: String =
asString.fold(sys.error, identity)
}
object JsonMiniQuery {
def parse(str: String): Either[String, JsonMiniQuery] =
Parser.query
.parseAll(str)
.leftMap(err =>
s"Unexpected input at ${err.failedAtOffset}. Expected: ${err.expected.toList.mkString(", ")}"
)
def unsafeParse(str: String): JsonMiniQuery =
parse(str).fold(sys.error, identity)
val root: JsonMiniQuery = Identity
val id: JsonMiniQuery = Identity
val none: JsonMiniQuery = Empty
def and(self: JsonMiniQuery, more: JsonMiniQuery*): JsonMiniQuery =
Forall(NonEmptyVector(self, more.toVector))
def or(self: JsonMiniQuery, more: JsonMiniQuery*): JsonMiniQuery =
Concat(NonEmptyVector(self, more.toVector))
// --- impl
case object Identity extends JsonMiniQuery {
def apply(json: Json) = Vector(json)
override def >>(next: JsonMiniQuery): JsonMiniQuery = next
}
case object Empty extends JsonMiniQuery {
def apply(json: Json) = Vector.empty
override def at(field: String, fields: String*): JsonMiniQuery = this
override def at(field: Int, fields: Int*): JsonMiniQuery = this
override def isAll(value: String, values: String*) = this
override def isAny(value: String, values: String*) = this
override def >>(next: JsonMiniQuery): JsonMiniQuery = this
override def ++(other: JsonMiniQuery): JsonMiniQuery = other
}
private def unwrapArrays(json: Vector[Json]): Vector[Json] =
json.foldLeft(Vector.empty[Json]) { (res, el) =>
el.asArray.map(x => res ++ x).getOrElse(res :+ el)
}
final case class Fields(names: NonEmptyVector[String]) extends JsonMiniQuery {
def apply(json: Json) = json.foldWith(folder)
private val folder: Folder[Vector[Json]] = new Folder[Vector[Json]] {
def onNull = Vector.empty
def onBoolean(value: Boolean) = Vector.empty
def onNumber(value: JsonNumber) = Vector.empty
def onString(value: String) = Vector.empty
def onArray(value: Vector[Json]) =
unwrapArrays(value.flatMap(inner => inner.foldWith(this)))
def onObject(value: JsonObject) =
unwrapArrays(names.toVector.flatMap(value.apply))
}
}
final case class Indexes(indexes: NonEmptyVector[Int]) extends JsonMiniQuery {
def apply(json: Json) = json.foldWith(folder)
private val folder: Folder[Vector[Json]] = new Folder[Vector[Json]] {
def onNull = Vector.empty
def onBoolean(value: Boolean) = Vector.empty
def onNumber(value: JsonNumber) = Vector.empty
def onString(value: String) = Vector.empty
def onArray(value: Vector[Json]) =
unwrapArrays(indexes.toVector.flatMap(i => value.get(i.toLong)))
def onObject(value: JsonObject) =
Vector.empty
}
}
sealed trait MatchType {
def monoid: Monoid[Boolean]
}
object MatchType {
case object Any extends MatchType {
val monoid = Monoid.instance(false, _ || _)
}
case object All extends MatchType {
val monoid = Monoid.instance(true, _ && _)
}
case object None extends MatchType { // = not Any
val monoid = Monoid.instance(true, _ && !_)
}
}
final case class Filter(
values: NonEmptyVector[String],
combine: MatchType
) extends JsonMiniQuery {
def apply(json: Json): Vector[Json] =
json.asArray match {
case Some(arr) =>
unwrapArrays(arr.filter(el => el.foldWith(folder(combine))))
case None =>
if (json.foldWith(folder(combine))) unwrapArrays(Vector(json))
else Vector.empty
}
private val anyMatch = folder(MatchType.Any)
private def folder(matchType: MatchType): Folder[Boolean] = new Folder[Boolean] {
def onNull =
onString("*null*")
def onBoolean(value: Boolean) =
values
.map(_.equalsIgnoreCase(value.toString))
.fold(matchType.monoid)
def onNumber(value: JsonNumber) =
values
.map(
_.equalsIgnoreCase(
value.toLong.map(_.toString).getOrElse(value.toDouble.toString)
)
)
.fold(matchType.monoid)
def onString(value: String) =
values
.map(_.equalsIgnoreCase(value))
.fold(matchType.monoid)
def onArray(value: Vector[Json]) =
value
.map(inner => inner.foldWith(anyMatch))
.fold(matchType.monoid.empty)(matchType.monoid.combine)
def onObject(value: JsonObject) = false
}
}
final case class Chain(self: JsonMiniQuery, next: JsonMiniQuery) extends JsonMiniQuery {
def apply(json: Json): Vector[Json] =
next(Json.fromValues(self(json)))
}
final case class Concat(qs: NonEmptyVector[JsonMiniQuery]) extends JsonMiniQuery {
def apply(json: Json): Vector[Json] =
qs.toVector.flatMap(_.apply(json))
}
final case class Forall(qs: NonEmptyVector[JsonMiniQuery]) extends JsonMiniQuery {
def apply(json: Json): Vector[Json] =
combineWhenNonEmpty(qs.toVector.map(_.apply(json)), Vector.empty)
@annotation.tailrec
private def combineWhenNonEmpty(
values: Vector[Vector[Json]],
result: Vector[Json]
): Vector[Json] =
values.headOption match {
case Some(v) if v.nonEmpty => combineWhenNonEmpty(values.tail, result ++ v)
case Some(_) => Vector.empty
case None => result
}
}
implicit val jsonDecoder: Decoder[JsonMiniQuery] =
Decoder.decodeString.emap(parse)
implicit val jsonEncoder: Encoder[JsonMiniQuery] =
Encoder.encodeString.contramap(_.unsafeAsString)
}
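// Usage sketch (illustrative): queries can be built with the DSL or parsed from their
// textual form and are then applied to any io.circe Json value.
object JsonMiniQueryUsageSketch {
  import io.circe.Json

  // all tag names added to or removed from items in a TagsChanged event payload
  def tagNames(event: Json): Vector[Json] =
    JsonMiniQuery.root.at("content").at("added", "removed").at("name").apply(event)

  // the same query, parsed from its string representation
  val parsed: JsonMiniQuery = JsonMiniQuery.unsafeParse("content.added,removed.name")

  // predicate: does any of those names equal "invoice"?
  def hasInvoiceTag(event: Json): Boolean =
    parsed.isAny("invoice").matches(event)
}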

View File

@ -0,0 +1,105 @@
/*
* Copyright 2020 Eike K. & Contributors
*
* SPDX-License-Identifier: AGPL-3.0-or-later
*/
package docspell.jsonminiq
import cats.data.NonEmptyVector
import cats.parse.{Parser => P, Parser0 => P0}
import docspell.jsonminiq.JsonMiniQuery.{Identity => JQ}
private[jsonminiq] object Parser {
// a[,b] -> at(string)
// (1[,2,3]) -> at(int)
// :b -> isAny(b)
// =b -> isAll(b)
// [F & G] -> F && G
// [F | G] -> F || G
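//
// Examples (illustrative, matching the tests in ParserTest):
//   content.added,removed.name  ->  JQ.at("content").at("added", "removed").at("name")
//   content(1,2).name           ->  JQ.at("content").at(1, 2).at("name")
//   name=invoice                ->  JQ.at("name").isAll("invoice")
//   [c=d | e=f]                 ->  (JQ.at("c") >> JQ.isAll("d")) || (JQ.at("e") >> JQ.isAll("f"))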
private[this] val whitespace: P[Unit] = P.charIn(" \t\r\n").void
private[this] val extraFieldChars = "_-".toSet
private[this] val dontUse = "\"'\\[]()&|".toSet
private[this] val ws0: P0[Unit] = whitespace.rep0.void
private[this] val parenOpen: P[Unit] = P.char('(') *> ws0
private[this] val parenClose: P[Unit] = ws0.with1 *> P.char(')')
private[this] val bracketOpen: P[Unit] = P.char('[') *> ws0
private[this] val bracketClose: P[Unit] = ws0.with1 *> P.char(']')
private[this] val dot: P[Unit] = P.char('.')
private[this] val comma: P[Unit] = P.char(',')
private[this] val andSym: P[Unit] = ws0.with1 *> P.char('&') <* ws0
private[this] val orSym: P[Unit] = ws0.with1 *> P.char('|') <* ws0
private[this] val squote: P[Unit] = P.char('\'')
private[this] val dquote: P[Unit] = P.char('"')
private[this] val allOp: P[JsonMiniQuery.MatchType] =
P.char('=').as(JsonMiniQuery.MatchType.All)
private[this] val noneOp: P[JsonMiniQuery.MatchType] =
P.char('!').as(JsonMiniQuery.MatchType.None)
def isValidSimpleValue(c: Char): Boolean =
c > ' ' && !dontUse.contains(c)
val value: P[String] = {
val simpleString: P[String] =
P.charsWhile(isValidSimpleValue)
val quotedString: P[String] = {
val single: P[String] =
squote *> P.charsWhile0(_ != '\'') <* squote
val double: P[String] =
dquote *> P.charsWhile0(_ != '"') <* dquote
single | double
}
simpleString | quotedString
}
val field: P[String] =
P.charsWhile(c => c.isLetterOrDigit || extraFieldChars.contains(c))
val posNum: P[Int] = P.charsWhile(_.isDigit).map(_.toInt).filter(_ >= 0)
val fieldSelect1: P[JsonMiniQuery] =
field.repSep(comma).map(nel => JQ.at(nel.head, nel.tail: _*))
val arraySelect1: P[JsonMiniQuery] = {
val nums = posNum.repSep(1, comma)
parenOpen.soft *> nums.map(f => JQ.at(f.head, f.tail: _*)) <* parenClose
}
val match1: P[JsonMiniQuery] =
((allOp | noneOp) ~ value).map { case (op, v) =>
JsonMiniQuery.Filter(NonEmptyVector.of(v), op)
}
val segment = {
val firstSegment = fieldSelect1 | arraySelect1 | match1
val nextSegment = (dot *> fieldSelect1) | arraySelect1 | match1
(firstSegment ~ nextSegment.rep0).map { case (head, tail) =>
tail.foldLeft(head)(_ >> _)
}
}
def combine(inner: P[JsonMiniQuery]): P[JsonMiniQuery] = {
val or = inner.repSep(orSym).map(_.reduceLeft(_ || _))
val and = inner.repSep(andSym).map(_.reduceLeft(_ && _))
and
.between(bracketOpen, bracketClose)
.backtrack
.orElse(or.between(bracketOpen, bracketClose))
}
val query: P[JsonMiniQuery] =
P.recursive[JsonMiniQuery] { recurse =>
val comb = combine(recurse)
P.oneOf(segment :: comb :: Nil).rep.map(_.reduceLeft(_ >> _))
}
}

View File

@ -0,0 +1,76 @@
/*
* Copyright 2020 Eike K. & Contributors
*
* SPDX-License-Identifier: AGPL-3.0-or-later
*/
package docspell.jsonminiq
import cats.parse.{Parser => P}
import io.circe.Json
trait Fixtures {
val sampleEvent: Json =
parseJson(
"""{
| "eventType": "TagsChanged",
| "account": {
| "collective": "demo",
| "user": "demo",
| "login": "demo"
| },
| "content": {
| "account": "demo",
| "items": [
| {
| "id": "4PvMM4m7Fwj-FsPRGxYt9zZ-uUzi35S2rEX-usyDEVyheR8",
| "name": "MapleSirupLtd_202331.pdf",
| "dateMillis": 1633557740733,
| "date": "2021-10-06",
| "direction": "incoming",
| "state": "confirmed",
| "dueDateMillis": 1639173740733,
| "dueDate": "2021-12-10",
| "source": "webapp",
| "overDue": false,
| "dueIn": "in 3 days",
| "corrOrg": "Acme AG",
| "notes": null
| }
| ],
| "added": [
| {
| "id": "Fy4VC6hQwcL-oynrHaJg47D-Q5RiQyB5PQP-N5cFJ368c4N",
| "name": "Invoice",
| "category": "doctype"
| },
| {
| "id": "7zaeU6pqVym-6Je3Q36XNG2-ZdBTFSVwNjc-pJRXciTMP3B",
| "name": "Grocery",
| "category": "expense"
| }
| ],
| "removed": [
| {
| "id": "GbXgszdjBt4-zrzuLHoUx7N-RMFatC8CyWt-5dsBCvxaEuW",
| "name": "Receipt",
| "category": "doctype"
| }
| ],
| "itemUrl": "http://localhost:7880/app/item"
| }
|}""".stripMargin
)
def parseJson(str: String): Json =
io.circe.parser.parse(str).fold(throw _, identity)
def parseP[A](p: P[A], str: String): A =
p.parseAll(str.trim())
.fold(e => sys.error(s"${e.getClass}: $e"), identity)
def parse(str: String): JsonMiniQuery = parseP(Parser.query, str)
}

View File

@ -0,0 +1,52 @@
/*
* Copyright 2020 Eike K. & Contributors
*
* SPDX-License-Identifier: AGPL-3.0-or-later
*/
package docspell.jsonminiq
import docspell.jsonminiq.JsonMiniQuery.{Identity => JQ}
import munit._
class FormatTest extends FunSuite with Fixtures {
def format(q: JsonMiniQuery): String =
q.unsafeAsString
test("field selects") {
assertEquals(
format(JQ.at("content").at("added", "removed").at("name")),
"content.added,removed.name"
)
}
test("array select") {
assertEquals(format(JQ.at("content").at(1, 2).at("name")), "content(1,2).name")
}
test("anyMatch / allMatch") {
assertEquals(format(JQ.isAny("in voice")), ":'in voice'")
assertEquals(format(JQ.isAll("invoice")), "=invoice")
assertEquals(format(JQ.at("name").isAll("invoice")), "name=invoice")
assertEquals(format(JQ.at("name").isAny("invoice")), "name:invoice")
}
test("and / or") {
assertEquals(
format((JQ.at("c") >> JQ.isAll("d")) || (JQ.at("e") >> JQ.isAll("f"))),
"[c=d | e=f]"
)
assertEquals(
format(
(JQ.at("a").isAll("1")) || (
(JQ.at("b").isAll("2")) && (JQ.at("c").isAll("3"))
)
),
"[a=1 | [b=2 & c=3]]"
)
}
}

View File

@ -0,0 +1,161 @@
/*
* Copyright 2020 Eike K. & Contributors
*
* SPDX-License-Identifier: AGPL-3.0-or-later
*/
package docspell.jsonminiq
import docspell.jsonminiq.JsonMiniQuery.{Identity => JQ}
import io.circe.Encoder
import io.circe.Json
import io.circe.syntax._
import munit._
class JsonMiniQueryTest extends FunSuite with Fixtures {
def values[T: Encoder](v1: T, vn: T*): Vector[Json] =
(v1 +: vn.toVector).map(_.asJson)
test("combine values on same level") {
val q = JQ
.at("content")
.at("added", "removed")
.at("name")
assertEquals(q(sampleEvent), values("Invoice", "Grocery", "Receipt"))
}
test("combine values from different levels") {
val q1 = JQ.at("account")
val q2 = JQ.at("removed").at("name")
val q = JQ.at("content") >> (q1 ++ q2)
assertEquals(q(sampleEvent), values("demo", "Receipt"))
}
test("filter single value") {
val q = JQ.at("account").at("login").isAll("demo")
assertEquals(q(sampleEvent), values("demo"))
val q2 = JQ.at("account").at("login").isAll("james")
assertEquals(q2(sampleEvent), Vector.empty)
}
test("combine filters") {
val q1 = JQ.at("account").at("login").isAll("demo")
val q2 = JQ.at("eventType").isAll("tagschanged")
val q3 = JQ.at("content").at("added", "removed").at("name").isAny("invoice")
val q = q1 && q2 && q3
assertEquals(
q(sampleEvent),
values("demo", "TagsChanged", "Invoice")
)
val q11 = JQ.at("account").at("login").isAll("not-exists")
val r = q11 && q2 && q3
assertEquals(r(sampleEvent), Vector.empty)
}
//content.[added,removed].(category=expense & name=grocery)
test("combine fields and filter") {
val andOk = JQ.at("content").at("added", "removed") >>
(JQ.at("name").is("grocery") && JQ.at("category").is("expense"))
assert(andOk.matches(sampleEvent))
val andNotOk = JQ.at("content").at("added", "removed") >>
(JQ.at("name").is("grocery") && JQ.at("category").is("notexist"))
assert(andNotOk.notMatches(sampleEvent))
val orOk = JQ.at("content").at("added", "removed") >>
(JQ.at("name").is("grocery") || JQ.at("category").is("notexist"))
assert(orOk.matches(sampleEvent))
}
test("thenAny combine via or") {
val q = JQ
.at("content")
.thenAny(
JQ.is("not this"),
JQ.at("account"),
JQ.at("oops")
)
assert(q.matches(sampleEvent))
}
test("thenAll combine via and (1)") {
val q = JQ
.at("content")
.thenAll(
JQ.is("not this"),
JQ.at("account"),
JQ.at("oops")
)
assert(q.notMatches(sampleEvent))
}
test("thenAll combine via and (2)") {
val q = JQ
.at("content")
.thenAll(
JQ.at("items").at("date").is("2021-10-06"),
JQ.at("account"),
JQ.at("added").at("name")
)
assert(q.matches(sampleEvent))
// equivalent
val q2 = JQ.at("content") >> (
JQ.at("items").at("date").is("2021-10-06") &&
JQ.at("account") &&
JQ.at("added").at("name")
)
assert(q2.matches(sampleEvent))
}
test("test for null/not null") {
val q1 = parse("content.items.notes=*null*")
assert(q1.matches(sampleEvent))
val q2 = parse("content.items.notes=bla")
assert(q2.notMatches(sampleEvent))
val q3 = parse("content.items.notes!*null*")
assert(q3.notMatches(sampleEvent))
}
test("more real expressions") {
val q = parse("content.added,removed[name=invoice | category=expense]")
assert(q.matches(sampleEvent))
}
test("examples") {
val q0 = parse("a.b.x,y")
val json = parseJson(
"""[{"a": {"b": {"x": 1, "y":2}}, "v": 0}, {"a": {"b": {"y": 9, "b": 2}}, "z": 0}]"""
)
assertEquals(q0(json), values(1, 2, 9))
val q1 = parse("a(0,2)")
val json1 = parseJson("""[{"a": [10,9,8,7]}, {"a": [1,2,3,4]}]""")
assertEquals(q1(json1), values(10, 8))
val q2 = parse("=blue")
val json2 = parseJson("""["blue", "green", "red"]""")
assertEquals(q2(json2), values("blue"))
val q3 = parse("color=blue")
val json3 = parseJson(
"""[{"color": "blue", "count": 2}, {"color": "blue", "count": 1}, {"color": "blue", "count": 3}]"""
)
assertEquals(q3(json3), values("blue", "blue", "blue"))
val q4 = parse("[count=6 | name=max]")
val json4 = parseJson(
"""[{"name":"max", "count":4}, {"name":"me", "count": 3}, {"name":"max", "count": 3}]"""
)
println(q4(json4))
}
}

View File

@ -0,0 +1,54 @@
/*
* Copyright 2020 Eike K. & Contributors
*
* SPDX-License-Identifier: AGPL-3.0-or-later
*/
package docspell.jsonminiq
import docspell.jsonminiq.JsonMiniQuery.{Identity => JQ}
import munit._
class ParserTest extends FunSuite with Fixtures {
test("field selects") {
assertEquals(
parse("content.added,removed.name"),
JQ.at("content").at("added", "removed").at("name")
)
}
test("array select") {
assertEquals(parse("content(1,2).name"), JQ.at("content").at(1, 2).at("name"))
}
test("values") {
assertEquals(parseP(Parser.value, "\"in voice\""), "in voice")
assertEquals(parseP(Parser.value, "'in voice'"), "in voice")
assertEquals(parseP(Parser.value, "invoice"), "invoice")
intercept[Throwable](parseP(Parser.value, "in voice"))
}
test("anyMatch / allMatch") {
assertEquals(parse("='invoice'"), JQ.isAll("invoice"))
assertEquals(parse("=invoice"), JQ.isAll("invoice"))
assertEquals(parse("name=invoice"), JQ.at("name").isAll("invoice"))
assertEquals(parse("name=\"invoice\""), JQ.at("name").isAll("invoice"))
}
test("and / or") {
assertEquals(
parse("[c=d | e=f]"),
(JQ.at("c") >> JQ.isAll("d")) || (JQ.at("e") >> JQ.isAll("f"))
)
assertEquals(
parse("[a=1 | [b=2 & c=3]]"),
(JQ.at("a") >> JQ.isAll("1")) || (
(JQ.at("b") >> JQ.isAll("2")) && (JQ.at("c") >> JQ.isAll("3"))
)
)
}
}

View File

@ -0,0 +1,112 @@
/*
* Copyright 2020 Eike K. & Contributors
*
* SPDX-License-Identifier: AGPL-3.0-or-later
*/
package docspell.notification.api
import cats.data.{NonEmptyList => Nel}
import docspell.common._
import emil.MailAddress
import io.circe.generic.extras.Configuration
import io.circe.generic.extras.semiauto.{deriveConfiguredDecoder, deriveConfiguredEncoder}
import io.circe.{Decoder, Encoder}
/** A type for representing channels as stored in the database. */
sealed trait Channel {
def id: Ident
def channelType: ChannelType
def fold[A](
f1: Channel.Mail => A,
f2: Channel.Gotify => A,
f3: Channel.Matrix => A,
f4: Channel.Http => A
): A
def asRef: ChannelRef = ChannelRef(id, channelType)
}
object Channel {
implicit val jsonConfig = Configuration.default.withDiscriminator("channelType")
final case class Mail(
id: Ident,
connection: Ident,
recipients: Nel[MailAddress]
) extends Channel {
val channelType = ChannelType.Mail
def fold[A](
f1: Mail => A,
f2: Gotify => A,
f3: Matrix => A,
f4: Http => A
): A = f1(this)
}
object Mail {
implicit def jsonDecoder(implicit D: Decoder[MailAddress]): Decoder[Mail] =
deriveConfiguredDecoder[Mail]
implicit def jsonEncoder(implicit E: Encoder[MailAddress]): Encoder[Mail] =
deriveConfiguredEncoder[Mail]
}
final case class Gotify(id: Ident, url: LenientUri, appKey: Password) extends Channel {
val channelType = ChannelType.Gotify
def fold[A](
f1: Mail => A,
f2: Gotify => A,
f3: Matrix => A,
f4: Http => A
): A = f2(this)
}
object Gotify {
implicit val jsonDecoder: Decoder[Gotify] =
deriveConfiguredDecoder
implicit val jsonEncoder: Encoder[Gotify] =
deriveConfiguredEncoder
}
final case class Matrix(
id: Ident,
homeServer: LenientUri,
roomId: String,
accessToken: Password
) extends Channel {
val channelType = ChannelType.Matrix
def fold[A](
f1: Mail => A,
f2: Gotify => A,
f3: Matrix => A,
f4: Http => A
): A = f3(this)
}
object Matrix {
implicit val jsonDecoder: Decoder[Matrix] = deriveConfiguredDecoder
implicit val jsonEncoder: Encoder[Matrix] = deriveConfiguredEncoder
}
final case class Http(id: Ident, url: LenientUri) extends Channel {
val channelType = ChannelType.Http
def fold[A](
f1: Mail => A,
f2: Gotify => A,
f3: Matrix => A,
f4: Http => A
): A = f4(this)
}
object Http {
implicit val jsonDecoder: Decoder[Http] = deriveConfiguredDecoder
implicit val jsonEncoder: Encoder[Http] = deriveConfiguredEncoder
}
implicit def jsonDecoder(implicit mc: Decoder[MailAddress]): Decoder[Channel] =
deriveConfiguredDecoder
implicit def jsonEncoder(implicit mc: Encoder[MailAddress]): Encoder[Channel] =
deriveConfiguredEncoder
}

View File

@ -0,0 +1,24 @@
/*
* Copyright 2020 Eike K. & Contributors
*
* SPDX-License-Identifier: AGPL-3.0-or-later
*/
package docspell.notification.api
import docspell.common.Ident
import io.circe.Decoder
import io.circe.Encoder
import io.circe.generic.semiauto.{deriveDecoder, deriveEncoder}
final case class ChannelRef(id: Ident, channelType: ChannelType)
object ChannelRef {
implicit val jsonDecoder: Decoder[ChannelRef] =
deriveDecoder
implicit val jsonEncoder: Encoder[ChannelRef] =
deriveEncoder
}

View File

@ -0,0 +1,46 @@
/*
* Copyright 2020 Eike K. & Contributors
*
* SPDX-License-Identifier: AGPL-3.0-or-later
*/
package docspell.notification.api
import cats.data.{NonEmptyList => Nel}
import io.circe.Decoder
import io.circe.Encoder
sealed trait ChannelType { self: Product =>
def name: String =
productPrefix
}
object ChannelType {
case object Mail extends ChannelType
case object Gotify extends ChannelType
case object Matrix extends ChannelType
case object Http extends ChannelType
val all: Nel[ChannelType] =
Nel.of(Mail, Gotify, Matrix, Http)
def fromString(str: String): Either[String, ChannelType] =
str.toLowerCase match {
case "mail" => Right(Mail)
case "gotify" => Right(Gotify)
case "matrix" => Right(Matrix)
case "http" => Right(Http)
case _ => Left(s"Unknown channel type: $str")
}
def unsafeFromString(str: String): ChannelType =
fromString(str).fold(sys.error, identity)
implicit val jsonDecoder: Decoder[ChannelType] =
Decoder.decodeString.emap(fromString)
implicit val jsonEncoder: Encoder[ChannelType] =
Encoder.encodeString.contramap(_.name)
}
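// Sketch: names round-trip case-insensitively through fromString.
object ChannelTypeSketch {
  val gotify = ChannelType.fromString("Gotify") // Right(ChannelType.Gotify)
  val names = ChannelType.all.map(_.name) // Mail, Gotify, Matrix, Http
}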

View File

@ -0,0 +1,246 @@
/*
* Copyright 2020 Eike K. & Contributors
*
* SPDX-License-Identifier: AGPL-3.0-or-later
*/
package docspell.notification.api
import cats.data.{NonEmptyList => Nel}
import cats.effect.kernel.Sync
import cats.implicits._
import docspell.common._
import io.circe.{Decoder, Encoder}
/** An event generated in the platform. */
sealed trait Event {
/** The type of event */
def eventType: EventType
/** The user who caused it. */
def account: AccountId
/** The base url for generating links. This is dynamic. */
def baseUrl: Option[LenientUri]
}
sealed trait EventType { self: Product =>
def name: String =
productPrefix
}
object EventType {
def all: Nel[EventType] =
Nel.of(
Event.TagsChanged,
Event.SetFieldValue,
Event.DeleteFieldValue,
Event.ItemSelection,
Event.JobSubmitted,
Event.JobDone
)
def fromString(str: String): Either[String, EventType] =
all.find(_.name.equalsIgnoreCase(str)).toRight(s"Unknown event type: $str")
def unsafeFromString(str: String): EventType =
fromString(str).fold(sys.error, identity)
implicit val jsonDecoder: Decoder[EventType] =
Decoder.decodeString.emap(fromString)
implicit val jsonEncoder: Encoder[EventType] =
Encoder.encodeString.contramap(_.name)
}
object Event {
/** Event triggered when tags of one or more items have changed */
final case class TagsChanged(
account: AccountId,
items: Nel[Ident],
added: List[String],
removed: List[String],
baseUrl: Option[LenientUri]
) extends Event {
val eventType = TagsChanged
}
case object TagsChanged extends EventType {
def partial(
items: Nel[Ident],
added: List[String],
removed: List[String]
): (AccountId, Option[LenientUri]) => TagsChanged =
(acc, url) => TagsChanged(acc, items, added, removed, url)
def sample[F[_]: Sync](
account: AccountId,
baseUrl: Option[LenientUri]
): F[TagsChanged] =
for {
id1 <- Ident.randomId[F]
id2 <- Ident.randomId[F]
id3 <- Ident.randomId[F]
} yield TagsChanged(account, Nel.of(id1), List(id2.id), List(id3.id), baseUrl)
}
/** Event triggered when a custom field on an item changes. */
final case class SetFieldValue(
account: AccountId,
items: Nel[Ident],
field: Ident,
value: String,
baseUrl: Option[LenientUri]
) extends Event {
val eventType = SetFieldValue
}
case object SetFieldValue extends EventType {
def partial(
items: Nel[Ident],
field: Ident,
value: String
): (AccountId, Option[LenientUri]) => SetFieldValue =
(acc, url) => SetFieldValue(acc, items, field, value, url)
def sample[F[_]: Sync](
account: AccountId,
baseUrl: Option[LenientUri]
): F[SetFieldValue] =
for {
id1 <- Ident.randomId[F]
id2 <- Ident.randomId[F]
} yield SetFieldValue(account, Nel.of(id1), id2, "10.15", baseUrl)
}
final case class DeleteFieldValue(
account: AccountId,
items: Nel[Ident],
field: Ident,
baseUrl: Option[LenientUri]
) extends Event {
val eventType = DeleteFieldValue
}
case object DeleteFieldValue extends EventType {
def partial(
items: Nel[Ident],
field: Ident
): (AccountId, Option[LenientUri]) => DeleteFieldValue =
(acc, url) => DeleteFieldValue(acc, items, field, url)
def sample[F[_]: Sync](
account: AccountId,
baseUrl: Option[LenientUri]
): F[DeleteFieldValue] =
for {
id1 <- Ident.randomId[F]
id2 <- Ident.randomId[F]
} yield DeleteFieldValue(account, Nel.of(id1), id2, baseUrl)
}
/** Some generic list of items, chosen by a user. */
final case class ItemSelection(
account: AccountId,
items: Nel[Ident],
more: Boolean,
baseUrl: Option[LenientUri]
) extends Event {
val eventType = ItemSelection
}
case object ItemSelection extends EventType {
def sample[F[_]: Sync](
account: AccountId,
baseUrl: Option[LenientUri]
): F[ItemSelection] =
for {
id1 <- Ident.randomId[F]
id2 <- Ident.randomId[F]
} yield ItemSelection(account, Nel.of(id1, id2), true, baseUrl)
}
/** Event when a new job is added to the queue */
final case class JobSubmitted(
jobId: Ident,
group: Ident,
task: Ident,
args: String,
state: JobState,
subject: String,
submitter: Ident
) extends Event {
val eventType = JobSubmitted
val baseUrl = None
def account: AccountId = AccountId(group, submitter)
}
case object JobSubmitted extends EventType {
def sample[F[_]: Sync](account: AccountId): F[JobSubmitted] =
for {
id <- Ident.randomId[F]
ev = JobSubmitted(
id,
account.collective,
Ident.unsafe("process-something-task"),
"",
JobState.running,
"Process 3 files",
account.user
)
} yield ev
}
/** Event when a job is finished (in final state). */
final case class JobDone(
jobId: Ident,
group: Ident,
task: Ident,
args: String,
state: JobState,
subject: String,
submitter: Ident
) extends Event {
val eventType = JobDone
val baseUrl = None
def account: AccountId = AccountId(group, submitter)
}
case object JobDone extends EventType {
def sample[F[_]: Sync](account: AccountId): F[JobDone] =
for {
id <- Ident.randomId[F]
ev = JobDone(
id,
account.collective,
Ident.unsafe("process-something-task"),
"",
JobState.running,
"Process 3 files",
account.user
)
} yield ev
}
def sample[F[_]: Sync](
evt: EventType,
account: AccountId,
baseUrl: Option[LenientUri]
): F[Event] =
evt match {
case TagsChanged =>
TagsChanged.sample[F](account, baseUrl).map(x => x: Event)
case SetFieldValue =>
SetFieldValue.sample[F](account, baseUrl).map(x => x: Event)
case ItemSelection =>
ItemSelection.sample[F](account, baseUrl).map(x => x: Event)
case JobSubmitted =>
JobSubmitted.sample[F](account).map(x => x: Event)
case JobDone =>
JobDone.sample[F](account).map(x => x: Event)
case DeleteFieldValue =>
DeleteFieldValue.sample[F](account, baseUrl).map(x => x: Event)
}
}
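// Sketch (hypothetical values): events are plain data; `partial` defers account and
// base url so callers can supply them later.
object EventSketch {
  import cats.data.{NonEmptyList => Nel}
  import docspell.common._

  def tagsChanged(account: AccountId, item: Ident, tagId: Ident): Event =
    Event.TagsChanged.partial(Nel.of(item), added = List(tagId.id), removed = Nil)(account, None)
}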

View File

@ -0,0 +1,23 @@
/*
* Copyright 2020 Eike K. & Contributors
*
* SPDX-License-Identifier: AGPL-3.0-or-later
*/
package docspell.notification.api
import docspell.notification.api.Event._
import io.circe.generic.semiauto.{deriveDecoder, deriveEncoder}
import io.circe.{Decoder, Encoder}
trait EventCodec {
implicit val tagsChangedDecoder: Decoder[TagsChanged] = deriveDecoder
implicit val tagsChangedEncoder: Encoder[TagsChanged] = deriveEncoder
implicit val eventDecoder: Decoder[Event] =
deriveDecoder
implicit val eventEncoder: Encoder[Event] =
deriveEncoder
}

View File

@ -0,0 +1,91 @@
/*
* Copyright 2020 Eike K. & Contributors
*
* SPDX-License-Identifier: AGPL-3.0-or-later
*/
package docspell.notification.api
import cats.Applicative
import cats.Functor
import cats.data.Kleisli
import cats.data.OptionT
import io.circe.Json
import io.circe.syntax._
trait EventContext {
def event: Event
def content: Json
lazy val asJson: Json =
Json.obj(
"eventType" -> event.eventType.asJson,
"account" -> Json.obj(
"collective" -> event.account.collective.asJson,
"user" -> event.account.user.asJson,
"login" -> event.account.asJson
),
"content" -> content
)
def defaultTitle: String
def defaultTitleHtml: String
def defaultBody: String
def defaultBodyHtml: String
def defaultBoth: String
def defaultBothHtml: String
lazy val asJsonWithMessage: Json = {
val data = asJson
val msg = Json.obj(
"message" -> Json.obj(
"title" -> defaultTitle.asJson,
"body" -> defaultBody.asJson
),
"messageHtml" -> Json.obj(
"title" -> defaultTitleHtml.asJson,
"body" -> defaultBodyHtml.asJson
)
)
data.withObject(o1 => msg.withObject(o2 => o1.deepMerge(o2).asJson))
}
}
object EventContext {
def empty[F[_]](ev: Event): EventContext =
new EventContext {
val event = ev
def content = Json.obj()
def defaultTitle = ""
def defaultTitleHtml = ""
def defaultBody = ""
def defaultBodyHtml = ""
def defaultBoth: String = ""
def defaultBothHtml: String = ""
}
/** For an event, a context can be created that is usually amended with more
* information. Since this information may be missing, it is possible that no context
* can be created.
*/
type Factory[F[_], E <: Event] = Kleisli[OptionT[F, *], E, EventContext]
def factory[F[_]: Functor, E <: Event](
run: E => F[EventContext]
): Factory[F, E] =
Kleisli(run).mapK(OptionT.liftK[F])
def pure[F[_]: Applicative, E <: Event](run: E => EventContext): Factory[F, E] =
factory(ev => Applicative[F].pure(run(ev)))
type Example[F[_], E <: Event] = Kleisli[F, E, EventContext]
def example[F[_], E <: Event](run: E => F[EventContext]): Example[F, E] =
Kleisli(run)
}
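// Sketch: a factory that needs no additional data can be built with `pure`; factories
// that may fail to look up their data signal this through the OptionT.
object EventContextSketch {
  import cats.Applicative

  def emptyFactory[F[_]: Applicative]: EventContext.Factory[F, Event] =
    EventContext.pure(ev => EventContext.empty[F](ev))
}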

View File

@ -0,0 +1,54 @@
/*
* Copyright 2020 Eike K. & Contributors
*
* SPDX-License-Identifier: AGPL-3.0-or-later
*/
package docspell.notification.api
import cats.Applicative
import cats.data.Kleisli
import cats.effect._
import cats.effect.std.Queue
import cats.implicits._
import fs2.Stream
import docspell.common.Logger
/** Combines a sink and a reader into one place where events can be submitted and
* processed in a producer-consumer manner.
*/
trait EventExchange[F[_]] extends EventSink[F] with EventReader[F] {}
object EventExchange {
private[this] val logger = org.log4s.getLogger
def silent[F[_]: Applicative]: EventExchange[F] =
new EventExchange[F] {
def offer(event: Event): F[Unit] =
EventSink.silent[F].offer(event)
def consume(maxConcurrent: Int)(run: Kleisli[F, Event, Unit]): Stream[F, Nothing] =
Stream.empty.covary[F]
}
def circularQueue[F[_]: Async](queueSize: Int): F[EventExchange[F]] =
Queue.circularBuffer[F, Event](queueSize).map(q => new Impl(q))
final class Impl[F[_]: Async](queue: Queue[F, Event]) extends EventExchange[F] {
private[this] val log = Logger.log4s[F](logger)
def offer(event: Event): F[Unit] =
log.debug(s"Pushing event to queue: $event") *>
queue.offer(event)
private val logEvent: Kleisli[F, Event, Unit] =
Kleisli(ev => log.debug(s"Consuming event: $ev"))
def consume(maxConcurrent: Int)(run: Kleisli[F, Event, Unit]): Stream[F, Nothing] = {
val stream = Stream.repeatEval(queue.take).evalMap((logEvent >> run).run)
log.s.info(s"Starting up $maxConcurrent notification event consumers").drain ++
Stream(stream).repeat.take(maxConcurrent.toLong).parJoin(maxConcurrent).drain
}
}
}
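// Usage sketch (illustrative, with cats-effect IO): producers `offer` events while a
// background fiber drains the queue via `consume`.
object EventExchangeSketch {
  import cats.effect.IO

  def run(handle: Kleisli[IO, Event, Unit], event: Event): IO[Unit] =
    for {
      exchange <- EventExchange.circularQueue[IO](queueSize = 100)
      consumer <- exchange.consume(maxConcurrent = 2)(handle).compile.drain.start
      _ <- exchange.offer(event)
      _ <- consumer.cancel
    } yield ()
}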

View File

@ -0,0 +1,17 @@
/*
* Copyright 2020 Eike K. & Contributors
*
* SPDX-License-Identifier: AGPL-3.0-or-later
*/
package docspell.notification.api
import cats.data.Kleisli
import fs2.Stream
trait EventReader[F[_]] {
/** Stream to allow processing of events offered via an `EventSink`. */
def consume(maxConcurrent: Int)(run: Kleisli[F, Event, Unit]): Stream[F, Nothing]
}

View File

@ -0,0 +1,25 @@
/*
* Copyright 2020 Eike K. & Contributors
*
* SPDX-License-Identifier: AGPL-3.0-or-later
*/
package docspell.notification.api
import cats.Applicative
import cats.implicits._
trait EventSink[F[_]] {
/** Submit the event for asynchronous processing. */
def offer(event: Event): F[Unit]
}
object EventSink {
def apply[F[_]](run: Event => F[Unit]): EventSink[F] =
(event: Event) => run(event)
def silent[F[_]: Applicative]: EventSink[F] =
EventSink(_ => ().pure[F])
}
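// Sketch: any effectful function is a sink, e.g. one that only logs the event.
object EventSinkSketch {
  import docspell.common.Logger

  def logging[F[_]](logger: Logger[F]): EventSink[F] =
    EventSink(ev => logger.debug(s"Event received: $ev"))
}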

View File

@ -0,0 +1,89 @@
/*
* Copyright 2020 Eike K. & Contributors
*
* SPDX-License-Identifier: AGPL-3.0-or-later
*/
package docspell.notification.api
import cats.Applicative
import cats.data.NonEmptyList
import cats.effect._
import cats.implicits._
import cats.kernel.Monoid
import fs2.Stream
import docspell.common._
/** Pushes notification messages/events to an external system */
trait NotificationBackend[F[_]] {
def send(event: EventContext): F[Unit]
}
object NotificationBackend {
def apply[F[_]](run: EventContext => F[Unit]): NotificationBackend[F] =
(event: EventContext) => run(event)
def silent[F[_]: Applicative]: NotificationBackend[F] =
NotificationBackend(_ => ().pure[F])
def combine[F[_]: Concurrent](
ba: NotificationBackend[F],
bb: NotificationBackend[F]
): NotificationBackend[F] =
(ba, bb) match {
case (a: Combined[F], b: Combined[F]) =>
Combined(a.delegates.concatNel(b.delegates))
case (a: Combined[F], _) =>
Combined(bb :: a.delegates)
case (_, b: Combined[F]) =>
Combined(ba :: b.delegates)
case (_, _) =>
Combined(NonEmptyList.of(ba, bb))
}
def ignoreErrors[F[_]: Sync](
logger: Logger[F]
)(nb: NotificationBackend[F]): NotificationBackend[F] =
NotificationBackend { event =>
nb.send(event).attempt.flatMap {
case Right(_) =>
logger.debug(s"Successfully sent notification: $event")
case Left(ex) =>
logger.error(ex)(s"Error sending notification: $event")
}
}
final private case class Combined[F[_]: Concurrent](
delegates: NonEmptyList[NotificationBackend[F]]
) extends NotificationBackend[F] {
val parNum = math.max(2, Runtime.getRuntime.availableProcessors() * 2)
def send(event: EventContext): F[Unit] =
Stream
.emits(delegates.toList)
.covary[F]
.parEvalMapUnordered(math.min(delegates.size, parNum))(_.send(event))
.drain
.compile
.drain
}
def combineAll[F[_]: Concurrent](
bes: NonEmptyList[NotificationBackend[F]]
): NotificationBackend[F] =
bes.tail match {
case Nil => bes.head
case next :: Nil =>
Combined(NonEmptyList.of(bes.head, next))
case next :: more =>
val first: NotificationBackend[F] = Combined(NonEmptyList.of(bes.head, next))
more.foldLeft(first)(combine)
}
implicit def monoid[F[_]: Concurrent]: Monoid[NotificationBackend[F]] =
Monoid.instance(silent[F], combine[F])
}
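// Sketch: backends form a monoid, so several can be folded into one that sends to all
// channels in parallel; `ignoreErrors` keeps one failing channel from affecting the others.
object NotificationBackendSketch {
  import cats.effect.IO
  import cats.syntax.semigroup._

  def fanOut(a: NotificationBackend[IO], b: NotificationBackend[IO]): NotificationBackend[IO] =
    a |+| b
}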

View File

@ -0,0 +1,41 @@
/*
* Copyright 2020 Eike K. & Contributors
*
* SPDX-License-Identifier: AGPL-3.0-or-later
*/
package docspell.notification.api
import cats.data.NonEmptyList
import docspell.common._
import emil._
sealed trait NotificationChannel { self: Product =>
def name: String =
productPrefix.toLowerCase
}
object NotificationChannel {
final case class Email(
config: MailConfig,
from: MailAddress,
recipients: NonEmptyList[MailAddress]
) extends NotificationChannel
final case class HttpPost(
url: LenientUri,
headers: Map[String, String]
) extends NotificationChannel
final case class Gotify(url: LenientUri, appKey: Password) extends NotificationChannel
final case class Matrix(
homeServer: LenientUri,
roomId: String,
accessToken: Password,
messageType: String
) extends NotificationChannel
}

View File

@ -0,0 +1,62 @@
/*
* Copyright 2020 Eike K. & Contributors
*
* SPDX-License-Identifier: AGPL-3.0-or-later
*/
package docspell.notification.api
import cats.Applicative
import cats.data.{Kleisli, OptionT}
import cats.implicits._
import fs2.Stream
import docspell.common.Logger
trait NotificationModule[F[_]]
extends EventSink[F]
with EventReader[F]
with EventExchange[F] {
/** Sends an event as notification through configured channels. */
def notifyEvent: Kleisli[F, Event, Unit]
/** Send the event data via the given channels. */
def send(
logger: Logger[F],
event: EventContext,
channels: Seq[NotificationChannel]
): F[Unit]
/** Amend an event with additional data. */
def eventContext: EventContext.Factory[F, Event]
/** Create an example event context. */
def sampleEvent: EventContext.Example[F, Event]
/** Consume all offered events asynchronously. */
def consumeAllEvents(maxConcurrent: Int): Stream[F, Nothing] =
consume(maxConcurrent)(notifyEvent)
}
object NotificationModule {
def noop[F[_]: Applicative]: NotificationModule[F] =
new NotificationModule[F] {
val noSend = NotificationBackend.silent[F]
val noExchange = EventExchange.silent[F]
def notifyEvent = Kleisli(_ => ().pure[F])
def eventContext = Kleisli(_ => OptionT.none[F, EventContext])
def sampleEvent = EventContext.example(ev => EventContext.empty(ev).pure[F])
def send(
logger: Logger[F],
event: EventContext,
channels: Seq[NotificationChannel]
) =
noSend.send(event)
def offer(event: Event) = noExchange.offer(event)
def consume(maxConcurrent: Int)(run: Kleisli[F, Event, Unit]) =
noExchange.consume(maxConcurrent)(run)
}
}

View File

@ -0,0 +1,41 @@
/*
* Copyright 2020 Eike K. & Contributors
*
* SPDX-License-Identifier: AGPL-3.0-or-later
*/
package docspell.notification.api
import docspell.common._
import emil.MailAddress
import io.circe.generic.semiauto
import io.circe.{Decoder, Encoder}
final case class PeriodicDueItemsArgs(
account: AccountId,
channel: ChannelOrRef,
remindDays: Int,
daysBack: Option[Int],
tagsInclude: List[Ident],
tagsExclude: List[Ident],
baseUrl: Option[LenientUri]
)
object PeriodicDueItemsArgs {
val taskName = Ident.unsafe("periodic-due-items-notify")
implicit def jsonDecoder(implicit
mc: Decoder[MailAddress]
): Decoder[PeriodicDueItemsArgs] = {
implicit val x = ChannelOrRef.jsonDecoder
semiauto.deriveDecoder
}
implicit def jsonEncoder(implicit
mc: Encoder[MailAddress]
): Encoder[PeriodicDueItemsArgs] = {
implicit val x = ChannelOrRef.jsonEncoder
semiauto.deriveEncoder
}
}

View File

@ -0,0 +1,38 @@
/*
* Copyright 2020 Eike K. & Contributors
*
* SPDX-License-Identifier: AGPL-3.0-or-later
*/
package docspell.notification.api
import docspell.common._
import emil.MailAddress
import io.circe.generic.semiauto
import io.circe.{Decoder, Encoder}
final case class PeriodicQueryArgs(
account: AccountId,
channel: ChannelOrRef,
query: ItemQueryString,
baseUrl: Option[LenientUri]
)
object PeriodicQueryArgs {
val taskName = Ident.unsafe("periodic-query-notify")
implicit def jsonDecoder(implicit
mc: Decoder[MailAddress]
): Decoder[PeriodicQueryArgs] = {
implicit val x = ChannelOrRef.jsonDecoder
semiauto.deriveDecoder
}
implicit def jsonEncoder(implicit
mc: Encoder[MailAddress]
): Encoder[PeriodicQueryArgs] = {
implicit val x = ChannelOrRef.jsonEncoder
semiauto.deriveEncoder
}
}

View File

@ -0,0 +1,29 @@
/*
* Copyright 2020 Eike K. & Contributors
*
* SPDX-License-Identifier: AGPL-3.0-or-later
*/
package docspell.notification
import emil.MailAddress
import io.circe.{Decoder, Encoder}
package object api {
type ChannelOrRef = Either[ChannelRef, Channel]
object ChannelOrRef {
implicit def jsonDecoder(implicit mc: Decoder[MailAddress]): Decoder[ChannelOrRef] =
Channel.jsonDecoder.either(ChannelRef.jsonDecoder).map(_.swap)
implicit def jsonEncoder(implicit mc: Encoder[MailAddress]): Encoder[ChannelOrRef] =
Encoder.instance(_.fold(ChannelRef.jsonEncoder.apply, Channel.jsonEncoder.apply))
implicit class ChannelOrRefOpts(cr: ChannelOrRef) {
def channelType: ChannelType =
cr.fold(_.channelType, _.channelType)
}
}
}

View File

@ -0,0 +1,63 @@
/*
* Copyright 2020 Eike K. & Contributors
*
* SPDX-License-Identifier: AGPL-3.0-or-later
*/
package docspell.notification.impl
import docspell.notification.api.EventContext
import yamusca.circe._
import yamusca.implicits._
import yamusca.imports._
abstract class AbstractEventContext extends EventContext {
def titleTemplate: Template
def bodyTemplate: Template
def render(template: Template): String =
asJson.render(template).trim()
def renderHtml(template: Template): String =
Markdown.toHtml(render(template))
lazy val defaultTitle: String =
render(titleTemplate)
lazy val defaultTitleHtml: String =
renderHtml(titleTemplate)
lazy val defaultBody: String =
render(bodyTemplate)
lazy val defaultBodyHtml: String =
renderHtml(bodyTemplate)
lazy val defaultBoth: String =
render(
AbstractEventContext.concat(
titleTemplate,
AbstractEventContext.sepTemplate,
bodyTemplate
)
)
lazy val defaultBothHtml: String =
renderHtml(
AbstractEventContext.concat(
titleTemplate,
AbstractEventContext.sepTemplate,
bodyTemplate
)
)
}
object AbstractEventContext {
private val sepTemplate: Template = mustache": "
private def concat(t1: Template, ts: Template*): Template =
Template(ts.foldLeft(t1.els)((res, el) => res ++ el.els))
}
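// Sketch of a concrete context (hypothetical event and data): implementations only need
// to provide the json content plus a title and body template, as the *Ctx classes do.
object AbstractEventContextSketch {
  import docspell.notification.api.Event
  import io.circe.Json

  final case class SimpleCtx(event: Event, data: Json) extends AbstractEventContext {
    val content = data
    val titleTemplate = mustache"{{eventType}} (by *{{account.user}}*)"
    val bodyTemplate = mustache"Something changed for *{{account.user}}*."
  }
}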

View File

@ -0,0 +1,41 @@
/*
* Copyright 2020 Eike K. & Contributors
*
* SPDX-License-Identifier: AGPL-3.0-or-later
*/
package docspell.notification.impl
import cats.data.Kleisli
import docspell.notification.api.{Event, EventContext}
import docspell.notification.impl.context._
import doobie._
object DbEventContext {
type Factory = EventContext.Factory[ConnectionIO, Event]
def apply: Factory =
Kleisli {
case ev: Event.TagsChanged =>
TagsChangedCtx.apply.run(ev)
case ev: Event.SetFieldValue =>
SetFieldValueCtx.apply.run(ev)
case ev: Event.DeleteFieldValue =>
DeleteFieldValueCtx.apply.run(ev)
case ev: Event.ItemSelection =>
ItemSelectionCtx.apply.run(ev)
case ev: Event.JobSubmitted =>
JobSubmittedCtx.apply.run(ev)
case ev: Event.JobDone =>
JobDoneCtx.apply.run(ev)
}
}

View File

@ -0,0 +1,40 @@
/*
* Copyright 2020 Eike K. & Contributors
*
* SPDX-License-Identifier: AGPL-3.0-or-later
*/
package docspell.notification.impl
import cats.effect._
import cats.implicits._
import docspell.common._
import docspell.notification.api._
import emil.Emil
import emil.markdown.MarkdownBody
final class EmailBackend[F[_]: Sync](
channel: NotificationChannel.Email,
mailService: Emil[F],
logger: Logger[F]
) extends NotificationBackend[F] {
import emil.builder._
def send(event: EventContext): F[Unit] = {
val mail =
MailBuilder.build(
From(channel.from),
Tos(channel.recipients.toList),
Subject(event.defaultTitle),
MarkdownBody[F](event.defaultBody)
)
logger.debug(s"Attempting to send notification mail: $channel") *>
mailService(channel.config)
.send(mail)
.flatMap(msgId => logger.info(s"Sent notification mail ${msgId.head}"))
}
}

View File

@ -0,0 +1,52 @@
/*
* Copyright 2020 Eike K. & Contributors
*
* SPDX-License-Identifier: AGPL-3.0-or-later
*/
package docspell.notification.impl
import cats.data.Kleisli
import cats.data.OptionT
import cats.effect._
import docspell.common.Logger
import docspell.notification.api.Event
import docspell.notification.api.NotificationBackend
import docspell.store.Store
import docspell.store.queries.QNotification
import emil.Emil
import org.http4s.client.Client
/** Represents the actual work done for each event. */
object EventNotify {
private[this] val log4sLogger = org.log4s.getLogger
def apply[F[_]: Async](
store: Store[F],
mailService: Emil[F],
client: Client[F]
): Kleisli[F, Event, Unit] =
Kleisli { event =>
(for {
hooks <- OptionT.liftF(store.transact(QNotification.findChannelsForEvent(event)))
evctx <- DbEventContext.apply.run(event).mapK(store.transform)
channels = hooks
.filter(hc =>
hc.channels.nonEmpty && hc.hook.eventFilter.forall(_.matches(evctx.asJson))
)
.flatMap(_.channels)
backend =
if (channels.isEmpty) NotificationBackend.silent[F]
else
NotificationBackendImpl.forChannelsIgnoreErrors(
client,
mailService,
Logger.log4s(log4sLogger)
)(channels)
_ <- OptionT.liftF(backend.send(evctx))
} yield ()).getOrElse(())
}
}

View File

@ -0,0 +1,39 @@
/*
* Copyright 2020 Eike K. & Contributors
*
* SPDX-License-Identifier: AGPL-3.0-or-later
*/
package docspell.notification.impl
import cats.data.Kleisli
import cats.effect.kernel.Sync
import docspell.notification.api.{Event, EventContext}
import docspell.notification.impl.context._
object ExampleEventContext {
type Factory[F[_]] = EventContext.Example[F, Event]
def apply[F[_]: Sync]: Factory[F] =
Kleisli {
case ev: Event.TagsChanged =>
TagsChangedCtx.sample.run(ev)
case ev: Event.SetFieldValue =>
SetFieldValueCtx.sample.run(ev)
case ev: Event.DeleteFieldValue =>
DeleteFieldValueCtx.sample.run(ev)
case ev: Event.ItemSelection =>
ItemSelectionCtx.sample.run(ev)
case ev: Event.JobSubmitted =>
JobSubmittedCtx.sample.run(ev)
case ev: Event.JobDone =>
JobDoneCtx.sample.run(ev)
}
}

View File

@ -0,0 +1,49 @@
/*
* Copyright 2020 Eike K. & Contributors
*
* SPDX-License-Identifier: AGPL-3.0-or-later
*/
package docspell.notification.impl
import cats.effect._
import cats.implicits._
import docspell.common.Logger
import docspell.notification.api._
import io.circe.Json
import org.http4s.Uri
import org.http4s.circe.CirceEntityCodec._
import org.http4s.client.Client
import org.http4s.client.dsl.Http4sClientDsl
import org.http4s.dsl.Http4sDsl
final class GotifyBackend[F[_]: Async](
channel: NotificationChannel.Gotify,
client: Client[F],
logger: Logger[F]
) extends NotificationBackend[F] {
val dsl = new Http4sDsl[F] with Http4sClientDsl[F] {}
import dsl._
def send(event: EventContext): F[Unit] = {
val url = Uri.unsafeFromString((channel.url / "message").asString)
val req = POST(
Json.obj(
"title" -> Json.fromString(event.defaultTitle),
"message" -> Json.fromString(event.defaultBody),
"extras" -> Json.obj(
"client::display" -> Json.obj(
"contentType" -> Json.fromString("text/markdown")
)
)
),
url
)
.putHeaders("X-Gotify-Key" -> channel.appKey.pass)
logger.debug(s"Seding request: $req") *>
HttpSend.sendRequest(client, req, channel, logger)
}
}

View File

@ -0,0 +1,36 @@
/*
* Copyright 2020 Eike K. & Contributors
*
* SPDX-License-Identifier: AGPL-3.0-or-later
*/
package docspell.notification.impl
import cats.effect._
import cats.implicits._
import docspell.common.Logger
import docspell.notification.api._
import org.http4s.Uri
import org.http4s.client.Client
import org.http4s.client.dsl.Http4sClientDsl
import org.http4s.dsl.Http4sDsl
final class HttpPostBackend[F[_]: Async](
channel: NotificationChannel.HttpPost,
client: Client[F],
logger: Logger[F]
) extends NotificationBackend[F] {
val dsl = new Http4sDsl[F] with Http4sClientDsl[F] {}
import dsl._
import org.http4s.circe.CirceEntityCodec._
def send(event: EventContext): F[Unit] = {
val url = Uri.unsafeFromString(channel.url.asString)
val req = POST(event.asJsonWithMessage, url).putHeaders(channel.headers.toList)
logger.debug(s"$channel sending request: $req") *>
HttpSend.sendRequest(client, req, channel, logger)
}
}

View File

@ -0,0 +1,35 @@
/*
* Copyright 2020 Eike K. & Contributors
*
* SPDX-License-Identifier: AGPL-3.0-or-later
*/
package docspell.notification.impl
import cats.effect._
import cats.implicits._
import docspell.common._
import docspell.notification.api.NotificationChannel
import org.http4s.Request
import org.http4s.client.Client
object HttpSend {
def sendRequest[F[_]: Async](
client: Client[F],
req: Request[F],
channel: NotificationChannel,
logger: Logger[F]
) =
client
.status(req)
.flatMap { status =>
if (status.isSuccess) logger.info(s"Sent notification via $channel")
else
Async[F].raiseError[Unit](
new Exception(s"Error sending notification via $channel: $status")
)
}
}

View File

@ -0,0 +1,40 @@
/*
* Copyright 2020 Eike K. & Contributors
*
* SPDX-License-Identifier: AGPL-3.0-or-later
*/
package docspell.notification.impl
import java.util
import com.vladsch.flexmark.ext.gfm.strikethrough.StrikethroughExtension
import com.vladsch.flexmark.ext.tables.TablesExtension
import com.vladsch.flexmark.html.HtmlRenderer
import com.vladsch.flexmark.parser.Parser
import com.vladsch.flexmark.util.data.{DataKey, MutableDataSet}
object Markdown {
def toHtml(md: String): String = {
val p = createParser()
val r = createRenderer()
val doc = p.parse(md)
r.render(doc).trim
}
private def createParser(): Parser = {
val opts = new MutableDataSet()
opts.set(
Parser.EXTENSIONS.asInstanceOf[DataKey[util.Collection[_]]],
util.Arrays.asList(TablesExtension.create(), StrikethroughExtension.create())
);
Parser.builder(opts).build()
}
private def createRenderer(): HtmlRenderer = {
val opts = new MutableDataSet()
HtmlRenderer.builder(opts).build()
}
}
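// Sketch: renders a markdown snippet (with the table and strikethrough extensions
// configured above) to an HTML fragment, e.g. roughly "<p>... <strong>overdue</strong></p>".
object MarkdownSketch {
  val html: String = Markdown.toHtml("Item *MapleSirupLtd_202331.pdf* is **overdue**.")
}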

View File

@ -0,0 +1,45 @@
/*
* Copyright 2020 Eike K. & Contributors
*
* SPDX-License-Identifier: AGPL-3.0-or-later
*/
package docspell.notification.impl
import cats.effect._
import docspell.common.Logger
import docspell.notification.api._
import org.http4s.Uri
import org.http4s.client.Client
import org.http4s.client.dsl.Http4sClientDsl
import org.http4s.dsl.Http4sDsl
final class MatrixBackend[F[_]: Async](
channel: NotificationChannel.Matrix,
client: Client[F],
logger: Logger[F]
) extends NotificationBackend[F] {
val dsl = new Http4sDsl[F] with Http4sClientDsl[F] {}
import dsl._
import org.http4s.circe.CirceEntityCodec._
def send(event: EventContext): F[Unit] = {
val url =
(channel.homeServer / "_matrix" / "client" / "r0" / "rooms" / channel.roomId / "send" / "m.room.message")
.withQuery("access_token", channel.accessToken.pass)
val uri = Uri.unsafeFromString(url.asString)
val req = POST(
Map(
"msgtype" -> channel.messageType,
"format" -> "org.matrix.custom.html",
"formatted_body" -> event.defaultBothHtml,
"body" -> event.defaultBoth
),
uri
)
HttpSend.sendRequest(client, req, channel, logger)
}
}

View File

@ -0,0 +1,61 @@
/*
* Copyright 2020 Eike K. & Contributors
*
* SPDX-License-Identifier: AGPL-3.0-or-later
*/
package docspell.notification.impl
import cats.data.NonEmptyList
import cats.effect._
import docspell.common.Logger
import docspell.notification.api.NotificationBackend.{combineAll, ignoreErrors, silent}
import docspell.notification.api.{NotificationBackend, NotificationChannel}
import emil.Emil
import org.http4s.client.Client
object NotificationBackendImpl {
def forChannel[F[_]: Async](client: Client[F], mailService: Emil[F], logger: Logger[F])(
channel: NotificationChannel
): NotificationBackend[F] =
channel match {
case c: NotificationChannel.Email =>
new EmailBackend[F](c, mailService, logger)
case c: NotificationChannel.HttpPost =>
new HttpPostBackend[F](c, client, logger)
case c: NotificationChannel.Gotify =>
new GotifyBackend[F](c, client, logger)
case c: NotificationChannel.Matrix =>
new MatrixBackend[F](c, client, logger)
}
def forChannels[F[_]: Async](client: Client[F], mailService: Emil[F], logger: Logger[F])(
channels: Seq[NotificationChannel]
): NotificationBackend[F] =
NonEmptyList.fromFoldable(channels) match {
case Some(nel) =>
combineAll[F](nel.map(forChannel(client, mailService, logger)))
case None =>
silent[F]
}
def forChannelsIgnoreErrors[F[_]: Async](
client: Client[F],
mailService: Emil[F],
logger: Logger[F]
)(
channels: Seq[NotificationChannel]
): NotificationBackend[F] =
NonEmptyList.fromFoldable(channels) match {
case Some(nel) =>
combineAll(
nel.map(forChannel[F](client, mailService, logger)).map(ignoreErrors[F](logger))
)
case None =>
silent[F]
}
}

View File

@ -0,0 +1,51 @@
/*
* Copyright 2020 Eike K. & Contributors
*
* SPDX-License-Identifier: AGPL-3.0-or-later
*/
package docspell.notification.impl
import cats.data.Kleisli
import cats.effect.kernel.Async
import cats.implicits._
import docspell.common._
import docspell.notification.api._
import docspell.store.Store
import emil.Emil
import org.http4s.client.Client
object NotificationModuleImpl {
def apply[F[_]: Async](
store: Store[F],
mailService: Emil[F],
client: Client[F],
queueSize: Int
): F[NotificationModule[F]] =
for {
exchange <- EventExchange.circularQueue[F](queueSize)
} yield new NotificationModule[F] {
val notifyEvent = EventNotify(store, mailService, client)
val eventContext = DbEventContext.apply.mapF(_.mapK(store.transform))
val sampleEvent = ExampleEventContext.apply
def send(
logger: Logger[F],
event: EventContext,
channels: Seq[NotificationChannel]
) =
NotificationBackendImpl
.forChannels(client, mailService, logger)(channels)
.send(event)
def offer(event: Event) = exchange.offer(event)
def consume(maxConcurrent: Int)(run: Kleisli[F, Event, Unit]) =
exchange.consume(maxConcurrent)(run)
}
}

View File

@ -0,0 +1,149 @@
/*
* Copyright 2020 Eike K. & Contributors
*
* SPDX-License-Identifier: AGPL-3.0-or-later
*/
package docspell.notification.impl.context
import cats.data.NonEmptyList
import cats.effect.Sync
import cats.implicits._
import docspell.common._
import docspell.query.ItemQuery
import docspell.query.ItemQueryDsl
import docspell.store.qb.Batch
import docspell.store.queries.ListItem
import docspell.store.queries.QItem
import docspell.store.queries.Query
import docspell.store.records._
import doobie._
import io.circe.Encoder
import io.circe.generic.semiauto.deriveEncoder
object BasicData {
final case class Tag(id: Ident, name: String, category: Option[String])
object Tag {
implicit val jsonEncoder: Encoder[Tag] = deriveEncoder
def apply(t: RTag): Tag = Tag(t.tagId, t.name, t.category)
def sample[F[_]: Sync](id: String): F[Tag] =
Sync[F]
.delay(if (math.random() > 0.5) "Invoice" else "Receipt")
.map(tag => Tag(Ident.unsafe(id), tag, Some("doctype")))
}
final case class Item(
id: Ident,
name: String,
dateMillis: Timestamp,
date: String,
direction: Direction,
state: ItemState,
dueDateMillis: Option[Timestamp],
dueDate: Option[String],
source: String,
overDue: Boolean,
dueIn: Option[String],
corrOrg: Option[String],
notes: Option[String]
)
object Item {
implicit val jsonEncoder: Encoder[Item] = deriveEncoder
private def calcDueLabels(now: Timestamp, dueDate: Option[Timestamp]) = {
val dueIn = dueDate.map(dt => Timestamp.daysBetween(now, dt))
val dueInLabel = dueIn.map {
case 0 => "**today**"
case 1 => "**tomorrow**"
case -1 => "**yesterday**"
case n if n > 0 => s"in $n days"
case n => s"${n * -1} days ago"
}
(dueIn, dueInLabel)
}
def find(
itemIds: NonEmptyList[Ident],
account: AccountId,
now: Timestamp
): ConnectionIO[Vector[Item]] = {
import ItemQueryDsl._
val q = Query(
Query.Fix(
account,
Some(ItemQuery.Attr.ItemId.in(itemIds.map(_.id))),
Some(_.created)
)
)
for {
items <- QItem
.findItems(q, now.toUtcDate, 25, Batch.limit(itemIds.size))
.compile
.to(Vector)
} yield items.map(apply(now))
}
def apply(now: Timestamp)(i: ListItem): Item = {
val (dueIn, dueInLabel) = calcDueLabels(now, i.dueDate)
Item(
i.id,
i.name,
i.date,
i.date.toUtcDate.toString,
i.direction,
i.state,
i.dueDate,
i.dueDate.map(_.toUtcDate.toString),
i.source,
dueIn.exists(_ < 0),
dueInLabel,
i.corrOrg.map(_.name),
i.notes
)
}
def sample[F[_]: Sync](id: Ident): F[Item] =
Timestamp.current[F].map { now =>
val dueDate = if (id.hashCode % 2 == 0) Some(now + Duration.days(3)) else None
val (dueIn, dueInLabel) = calcDueLabels(now, dueDate)
Item(
id,
"MapleSirupLtd_202331.pdf",
now - Duration.days(62),
(now - Duration.days(62)).toUtcDate.toString,
Direction.Incoming,
ItemState.Confirmed,
dueDate,
dueDate.map(_.toUtcDate.toString),
"webapp",
dueIn.exists(_ < 0),
dueInLabel,
Some("Acme AG"),
None
)
}
}
final case class Field(
id: Ident,
name: Ident,
label: Option[String],
ftype: CustomFieldType
)
object Field {
implicit val jsonEncoder: Encoder[Field] = deriveEncoder
def apply(r: RCustomField): Field =
Field(r.id, r.name, r.label, r.ftype)
def sample[F[_]: Sync](id: Ident): F[Field] =
Sync[F].delay(Field(id, Ident.unsafe("chf"), Some("CHF"), CustomFieldType.Money))
}
}

View File

@ -0,0 +1,83 @@
/*
* Copyright 2020 Eike K. & Contributors
*
* SPDX-License-Identifier: AGPL-3.0-or-later
*/
package docspell.notification.impl.context
import cats.data.Kleisli
import cats.data.OptionT
import cats.effect.Sync
import cats.implicits._
import docspell.common._
import docspell.notification.api.{Event, EventContext}
import docspell.notification.impl.AbstractEventContext
import docspell.notification.impl.context.BasicData._
import docspell.notification.impl.context.Syntax._
import docspell.store.records._
import doobie._
import io.circe.Encoder
import io.circe.syntax._
import yamusca.implicits._
final case class DeleteFieldValueCtx(
event: Event.DeleteFieldValue,
data: DeleteFieldValueCtx.Data
) extends AbstractEventContext {
val content = data.asJson
val titleTemplate = mustache"{{eventType}} (by *{{account.user}}*)"
val bodyTemplate =
mustache"""{{#content}}{{#field.label}}*{{field.label}}* {{/field.label}}{{^field.label}}*{{field.name}}* {{/field.label}} was removed from {{#items}}{{^-first}}, {{/-first}}{{#itemUrl}}[`{{name}}`]({{{itemUrl}}}/{{{id}}}){{/itemUrl}}{{^itemUrl}}`{{name}}`{{/itemUrl}}{{/items}}.{{/content}}"""
}
object DeleteFieldValueCtx {
type Factory = EventContext.Factory[ConnectionIO, Event.DeleteFieldValue]
def apply: Factory =
Kleisli(ev =>
for {
now <- OptionT.liftF(Timestamp.current[ConnectionIO])
items <- OptionT.liftF(Item.find(ev.items, ev.account, now))
field <- OptionT(RCustomField.findById(ev.field, ev.account.collective))
msg = DeleteFieldValueCtx(
ev,
Data(
ev.account,
items.toList,
Field(field),
ev.itemUrl
)
)
} yield msg
)
def sample[F[_]: Sync]: EventContext.Example[F, Event.DeleteFieldValue] =
EventContext.example(ev =>
for {
items <- ev.items.traverse(Item.sample[F])
field <- Field.sample[F](ev.field)
} yield DeleteFieldValueCtx(
ev,
Data(ev.account, items.toList, field, ev.itemUrl)
)
)
final case class Data(
account: AccountId,
items: List[Item],
field: Field,
itemUrl: Option[String]
)
object Data {
implicit val jsonEncoder: Encoder[Data] =
io.circe.generic.semiauto.deriveEncoder
}
}

View File

@ -0,0 +1,117 @@
/*
* Copyright 2020 Eike K. & Contributors
*
* SPDX-License-Identifier: AGPL-3.0-or-later
*/
package docspell.notification.impl.context
import cats.effect._
import cats.implicits._
import docspell.common._
import docspell.notification.api._
import docspell.notification.impl.AbstractEventContext
import docspell.notification.impl.context.Syntax._
import docspell.store.queries.ListItem
import doobie._
import io.circe.Encoder
import io.circe.syntax._
import yamusca.implicits._
final case class ItemSelectionCtx(event: Event.ItemSelection, data: ItemSelectionCtx.Data)
extends AbstractEventContext {
val content = data.asJson
val titleTemplate = mustache"Your items"
val bodyTemplate = mustache"""
Hello {{{ content.username }}},
this is Docspell informing you about your next items.
{{#content}}
{{#itemUrl}}
{{#items}}
- {{#overDue}}**(OVERDUE)** {{/overDue}}[{{name}}]({{itemUrl}}/{{id}}){{#dueDate}}, {{#overDue}}was {{/overDue}}due {{dueIn}} on *{{dueDate}}*{{/dueDate}}; {{#corrOrg}}from {{corrOrg}}{{/corrOrg}} received on {{date}} via {{source}}
{{/items}}
{{/itemUrl}}
{{^itemUrl}}
{{#items}}
- {{#overDue}}**(OVERDUE)** {{/overDue}}*{{name}}*{{#dueDate}}, {{#overDue}}was {{/overDue}}due {{dueIn}} on *{{dueDate}}*{{/dueDate}}; {{#corrOrg}}from {{corrOrg}}{{/corrOrg}} received on {{date}} via {{source}}
{{/items}}
{{/itemUrl}}
{{#more}}
- more items have been left out for brevity
{{/more}}
{{/content}}
Sincerely yours,
Docspell
"""
}
object ItemSelectionCtx {
import BasicData._
type Factory = EventContext.Factory[ConnectionIO, Event.ItemSelection]
def apply: Factory =
EventContext.factory(ev =>
for {
now <- Timestamp.current[ConnectionIO]
items <- Item.find(ev.items, ev.account, now)
msg = ItemSelectionCtx(
ev,
Data(
ev.account,
items.toList,
ev.itemUrl,
ev.more,
ev.account.user.id
)
)
} yield msg
)
def sample[F[_]: Sync]: EventContext.Example[F, Event.ItemSelection] =
EventContext.example(ev =>
for {
items <- ev.items.traverse(Item.sample[F])
} yield ItemSelectionCtx(
ev,
Data(ev.account, items.toList, ev.itemUrl, ev.more, ev.account.user.id)
)
)
final case class Data(
account: AccountId,
items: List[Item],
itemUrl: Option[String],
more: Boolean,
username: String
)
object Data {
implicit val jsonEncoder: Encoder[Data] =
io.circe.generic.semiauto.deriveEncoder
def create(
account: AccountId,
items: Vector[ListItem],
baseUrl: Option[LenientUri],
more: Boolean,
now: Timestamp
): Data =
Data(
account,
items.map(Item(now)).toList,
baseUrl.map(_.asString),
more,
account.user.id
)
}
}

View File

@ -0,0 +1,56 @@
/*
* Copyright 2020 Eike K. & Contributors
*
* SPDX-License-Identifier: AGPL-3.0-or-later
*/
package docspell.notification.impl.context
import cats.effect._
import docspell.common._
import docspell.notification.api._
import docspell.notification.impl.AbstractEventContext
import doobie._
import io.circe.Encoder
import io.circe.syntax._
import yamusca.implicits._
final case class JobDoneCtx(event: Event.JobDone, data: JobDoneCtx.Data)
extends AbstractEventContext {
val content = data.asJson
val titleTemplate = mustache"{{eventType}} (by *{{account.user}}*)"
val bodyTemplate = mustache"""{{#content}}_'{{subject}}'_ finished {{/content}}"""
}
object JobDoneCtx {
type Factory = EventContext.Factory[ConnectionIO, Event.JobDone]
def apply: Factory =
EventContext.pure(ev => JobDoneCtx(ev, Data(ev)))
def sample[F[_]: Sync]: EventContext.Example[F, Event.JobDone] =
EventContext.example(ev => Sync[F].pure(JobDoneCtx(ev, Data(ev))))
final case class Data(
job: Ident,
group: Ident,
task: Ident,
args: String,
state: JobState,
subject: String,
submitter: Ident
)
object Data {
implicit val jsonEncoder: Encoder[Data] =
io.circe.generic.semiauto.deriveEncoder
def apply(ev: Event.JobDone): Data =
Data(ev.jobId, ev.group, ev.task, ev.args, ev.state, ev.subject, ev.submitter)
}
}

View File

@ -0,0 +1,57 @@
/*
* Copyright 2020 Eike K. & Contributors
*
* SPDX-License-Identifier: AGPL-3.0-or-later
*/
package docspell.notification.impl.context
import cats.effect._
import docspell.common._
import docspell.notification.api._
import docspell.notification.impl.AbstractEventContext
import doobie._
import io.circe.Encoder
import io.circe.syntax._
import yamusca.implicits._
final case class JobSubmittedCtx(event: Event.JobSubmitted, data: JobSubmittedCtx.Data)
extends AbstractEventContext {
val content = data.asJson
val titleTemplate = mustache"{{eventType}} (by *{{account.user}}*)"
val bodyTemplate =
mustache"""{{#content}}_'{{subject}}'_ submitted by {{submitter}} {{/content}}"""
}
object JobSubmittedCtx {
type Factory = EventContext.Factory[ConnectionIO, Event.JobSubmitted]
def apply: Factory =
EventContext.pure(ev => JobSubmittedCtx(ev, Data(ev)))
def sample[F[_]: Sync]: EventContext.Example[F, Event.JobSubmitted] =
EventContext.example(ev => Sync[F].pure(JobSubmittedCtx(ev, Data(ev))))
final case class Data(
job: Ident,
group: Ident,
task: Ident,
args: String,
state: JobState,
subject: String,
submitter: Ident
)
object Data {
implicit val jsonEncoder: Encoder[Data] =
io.circe.generic.semiauto.deriveEncoder
def apply(ev: Event.JobSubmitted): Data =
Data(ev.jobId, ev.group, ev.task, ev.args, ev.state, ev.subject, ev.submitter)
}
}

View File

@ -0,0 +1,83 @@
/*
* Copyright 2020 Eike K. & Contributors
*
* SPDX-License-Identifier: AGPL-3.0-or-later
*/
package docspell.notification.impl.context
import cats.data.Kleisli
import cats.data.OptionT
import cats.effect.Sync
import cats.implicits._
import docspell.common._
import docspell.notification.api.{Event, EventContext}
import docspell.notification.impl.AbstractEventContext
import docspell.notification.impl.context.BasicData._
import docspell.notification.impl.context.Syntax._
import docspell.store.records._
import doobie._
import io.circe.Encoder
import io.circe.syntax._
import yamusca.implicits._
final case class SetFieldValueCtx(event: Event.SetFieldValue, data: SetFieldValueCtx.Data)
extends AbstractEventContext {
val content = data.asJson
val titleTemplate = mustache"{{eventType}} (by *{{account.user}}*)"
val bodyTemplate =
mustache"""{{#content}}{{#field.label}}*{{field.label}}* {{/field.label}}{{^field.label}}*{{field.name}}* {{/field.label}} was set to '{{value}}' on {{#items}}{{^-first}}, {{/-first}}{{#itemUrl}}[`{{name}}`]({{{itemUrl}}}/{{{id}}}){{/itemUrl}}{{^itemUrl}}`{{name}}`{{/itemUrl}}{{/items}}.{{/content}}"""
}
object SetFieldValueCtx {
type Factory = EventContext.Factory[ConnectionIO, Event.SetFieldValue]
def apply: Factory =
Kleisli(ev =>
for {
now <- OptionT.liftF(Timestamp.current[ConnectionIO])
items <- OptionT.liftF(Item.find(ev.items, ev.account, now))
field <- OptionT(RCustomField.findById(ev.field, ev.account.collective))
msg = SetFieldValueCtx(
ev,
Data(
ev.account,
items.toList,
Field(field),
ev.value,
ev.itemUrl
)
)
} yield msg
)
def sample[F[_]: Sync]: EventContext.Example[F, Event.SetFieldValue] =
EventContext.example(ev =>
for {
items <- ev.items.traverse(Item.sample[F])
field <- Field.sample[F](ev.field)
} yield SetFieldValueCtx(
ev,
Data(ev.account, items.toList, field, ev.value, ev.itemUrl)
)
)
final case class Data(
account: AccountId,
items: List[Item],
field: Field,
value: String,
itemUrl: Option[String]
)
object Data {
implicit val jsonEncoder: Encoder[Data] =
io.circe.generic.semiauto.deriveEncoder
}
}

View File

@ -0,0 +1,18 @@
/*
* Copyright 2020 Eike K. & Contributors
*
* SPDX-License-Identifier: AGPL-3.0-or-later
*/
package docspell.notification.impl.context
import docspell.notification.api.Event
object Syntax {
implicit final class EventOps(ev: Event) {
def itemUrl: Option[String] =
ev.baseUrl.map(_ / "app" / "item").map(_.asString)
}
}
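
For reference, a minimal sketch (not part of the diff) of what `itemUrl` evaluates to, assuming a hypothetical base url; `LenientUri./` appends path segments and the templates append the item id themselves:

  import docspell.common.LenientUri

  val base = LenientUri.unsafe("http://localhost:7880")
  val itemUrl = (base / "app" / "item").asString
  // yields something like "http://localhost:7880/app/item"; the templates render links as {{itemUrl}}/{{id}}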

View File

@ -0,0 +1,82 @@
/*
* Copyright 2020 Eike K. & Contributors
*
* SPDX-License-Identifier: AGPL-3.0-or-later
*/
package docspell.notification.impl.context
import cats.effect.Sync
import cats.implicits._
import docspell.common._
import docspell.notification.api.{Event, EventContext}
import docspell.notification.impl.AbstractEventContext
import docspell.notification.impl.context.BasicData._
import docspell.notification.impl.context.Syntax._
import docspell.store.records._
import doobie._
import io.circe.Encoder
import io.circe.syntax._
import yamusca.implicits._
final case class TagsChangedCtx(event: Event.TagsChanged, data: TagsChangedCtx.Data)
extends AbstractEventContext {
val content = data.asJson
val titleTemplate = mustache"{{eventType}} (by *{{account.user}}*)"
val bodyTemplate =
mustache"""{{#content}}{{#added}}{{#-first}}Adding {{/-first}}{{^-first}}, {{/-first}}*{{name}}*{{/added}}{{#removed}}{{#added}}{{#-first}};{{/-first}}{{/added}}{{#-first}} Removing {{/-first}}{{^-first}}, {{/-first}}*{{name}}*{{/removed}} on {{#items}}{{^-first}}, {{/-first}}{{#itemUrl}}[`{{name}}`]({{{itemUrl}}}/{{{id}}}){{/itemUrl}}{{^itemUrl}}`{{name}}`{{/itemUrl}}{{/items}}.{{/content}}"""
}
object TagsChangedCtx {
type Factory = EventContext.Factory[ConnectionIO, Event.TagsChanged]
def apply: Factory =
EventContext.factory(ev =>
for {
tagsAdded <- RTag.findAllByNameOrId(ev.added, ev.account.collective)
tagsRemov <- RTag.findAllByNameOrId(ev.removed, ev.account.collective)
now <- Timestamp.current[ConnectionIO]
items <- Item.find(ev.items, ev.account, now)
msg = TagsChangedCtx(
ev,
Data(
ev.account,
items.toList,
tagsAdded.map(Tag.apply).toList,
tagsRemov.map(Tag.apply).toList,
ev.itemUrl
)
)
} yield msg
)
def sample[F[_]: Sync]: EventContext.Example[F, Event.TagsChanged] =
EventContext.example(ev =>
for {
items <- ev.items.traverse(Item.sample[F])
added <- ev.added.traverse(Tag.sample[F])
remov <- ev.removed.traverse(Tag.sample[F])
} yield TagsChangedCtx(
ev,
Data(ev.account, items.toList, added, remov, ev.itemUrl)
)
)
final case class Data(
account: AccountId,
items: List[Item],
added: List[Tag],
removed: List[Tag],
itemUrl: Option[String]
)
object Data {
implicit val jsonEncoder: Encoder[Data] =
io.circe.generic.semiauto.deriveEncoder
}
}

View File

@ -0,0 +1,75 @@
/*
* Copyright 2020 Eike K. & Contributors
*
* SPDX-License-Identifier: AGPL-3.0-or-later
*/
package docspell.notification.impl.context
import cats.data.{NonEmptyList => Nel}
import cats.implicits._
import docspell.common._
import docspell.notification.api.Event
import docspell.notification.impl.context.BasicData._
import munit._
class TagsChangedCtxTest extends FunSuite {
val url = LenientUri.unsafe("http://test")
val account = AccountId(id("user2"), id("user2"))
val tag = Tag(id("a-b-1"), "tag-red", Some("doctype"))
val item = Item(
id = id("item-1"),
name = "Report 2",
dateMillis = Timestamp.Epoch,
date = "2020-11-11",
direction = Direction.Incoming,
state = ItemState.created,
dueDateMillis = None,
dueDate = None,
source = "webapp",
overDue = false,
dueIn = None,
corrOrg = Some("Acme"),
notes = None
)
def id(str: String): Ident = Ident.unsafe(str)
test("create tags changed message") {
val event =
Event.TagsChanged(account, Nel.of(id("item1")), List("tag-id"), Nil, url.some)
val ctx = TagsChangedCtx(
event,
TagsChangedCtx.Data(account, List(item), List(tag), Nil, url.some.map(_.asString))
)
assertEquals(ctx.defaultTitle, "TagsChanged (by *user2*)")
assertEquals(
ctx.defaultBody,
"Adding *tag-red* on [`Report 2`](http://test/item-1)."
)
}
test("create tags changed message") {
val event = Event.TagsChanged(account, Nel.of(id("item1")), Nil, Nil, url.some)
val ctx = TagsChangedCtx(
event,
TagsChangedCtx.Data(
account,
List(item),
List(tag),
List(tag.copy(name = "tag-blue")),
url.asString.some
)
)
assertEquals(ctx.defaultTitle, "TagsChanged (by *user2*)")
assertEquals(
ctx.defaultBody,
"Adding *tag-red*; Removing *tag-blue* on [`Report 2`](http://test/item-1)."
)
}
}

File diff suppressed because it is too large

View File

@ -0,0 +1,25 @@
/*
* Copyright 2020 Eike K. & Contributors
*
* SPDX-License-Identifier: AGPL-3.0-or-later
*/
package docspell.restapi.codec
import docspell.notification.api.ChannelRef
import docspell.restapi.model._
import io.circe.syntax._
import io.circe.{Decoder, Encoder}
trait ChannelEitherCodec {
implicit val channelDecoder: Decoder[Either[ChannelRef, NotificationChannel]] =
NotificationChannel.jsonDecoder.either(ChannelRef.jsonDecoder).map(_.swap)
implicit val channelEncoder: Encoder[Either[ChannelRef, NotificationChannel]] =
Encoder.instance(_.fold(_.asJson, _.asJson))
}
object ChannelEitherCodec extends ChannelEitherCodec
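
The decoder tries the full `NotificationChannel` shape first and only falls back to a bare `ChannelRef`, so an object carrying nothing but an id and a channel type lands on the `Left`. A minimal sketch (payloads are made up, mirroring the codec test further down):

  import docspell.notification.api.ChannelRef
  import docspell.restapi.codec.ChannelEitherCodec._
  import docspell.restapi.model.NotificationChannel
  import io.circe.parser

  // only id + channelType: the full-channel decoder fails, the fallback yields Left(ChannelRef(...))
  val ref = parser.decode[Either[ChannelRef, NotificationChannel]](
    """{"id":"abcde", "channelType":"matrix"}"""
  )

  // a complete gotify channel decodes to Right(NotificationChannel.Gotify(...))
  val full = parser.decode[Either[ChannelRef, NotificationChannel]](
    """{"id":"", "channelType":"gotify", "url":"http://gotify.example.com", "appKey":"abcde"}"""
  )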

View File

@ -0,0 +1,130 @@
/*
* Copyright 2020 Eike K. & Contributors
*
* SPDX-License-Identifier: AGPL-3.0-or-later
*/
package docspell.restapi.model
import cats.data.NonEmptyList
import cats.implicits._
import docspell.notification.api.Channel
import docspell.notification.api.ChannelType
import docspell.restapi.model._
import emil.MailAddress
import emil.javamail.syntax._
import io.circe.{Decoder, Encoder}
sealed trait NotificationChannel {
def fold[A](
f1: NotificationMail => A,
f2: NotificationGotify => A,
f3: NotificationMatrix => A,
f4: NotificationHttp => A
): A
}
object NotificationChannel {
final case class Mail(c: NotificationMail) extends NotificationChannel {
def fold[A](
f1: NotificationMail => A,
f2: NotificationGotify => A,
f3: NotificationMatrix => A,
f4: NotificationHttp => A
): A = f1(c)
}
final case class Gotify(c: NotificationGotify) extends NotificationChannel {
def fold[A](
f1: NotificationMail => A,
f2: NotificationGotify => A,
f3: NotificationMatrix => A,
f4: NotificationHttp => A
): A = f2(c)
}
final case class Matrix(c: NotificationMatrix) extends NotificationChannel {
def fold[A](
f1: NotificationMail => A,
f2: NotificationGotify => A,
f3: NotificationMatrix => A,
f4: NotificationHttp => A
): A = f3(c)
}
final case class Http(c: NotificationHttp) extends NotificationChannel {
def fold[A](
f1: NotificationMail => A,
f2: NotificationGotify => A,
f3: NotificationMatrix => A,
f4: NotificationHttp => A
): A = f4(c)
}
def mail(c: NotificationMail): NotificationChannel = Mail(c)
def gotify(c: NotificationGotify): NotificationChannel = Gotify(c)
def matrix(c: NotificationMatrix): NotificationChannel = Matrix(c)
def http(c: NotificationHttp): NotificationChannel = Http(c)
def convert(c: NotificationChannel): Either[Throwable, Channel] =
c.fold(
mail =>
mail.recipients
.traverse(MailAddress.parse)
.map(NonEmptyList.fromList)
.flatMap(_.toRight("No recipients given!"))
.leftMap(new IllegalArgumentException(_))
.map(rec => Channel.Mail(mail.id, mail.connection, rec)),
gotify => Right(Channel.Gotify(gotify.id, gotify.url, gotify.appKey)),
matrix =>
Right(
Channel
.Matrix(matrix.id, matrix.homeServer, matrix.roomId, matrix.accessToken)
),
http => Right(Channel.Http(http.id, http.url))
)
def convert(c: Channel): NotificationChannel =
c.fold(
m =>
mail {
NotificationMail(
m.id,
ChannelType.Mail,
m.connection,
m.recipients.toList.map(_.displayString)
)
},
g => gotify(NotificationGotify(g.id, ChannelType.Gotify, g.url, g.appKey)),
m =>
matrix(
NotificationMatrix(
m.id,
ChannelType.Matrix,
m.homeServer,
m.roomId,
m.accessToken
)
),
h => http(NotificationHttp(h.id, ChannelType.Http, h.url))
)
implicit val jsonDecoder: Decoder[NotificationChannel] =
ChannelType.jsonDecoder.at("channelType").flatMap {
case ChannelType.Mail => Decoder[NotificationMail].map(mail)
case ChannelType.Gotify => Decoder[NotificationGotify].map(gotify)
case ChannelType.Matrix => Decoder[NotificationMatrix].map(matrix)
case ChannelType.Http => Decoder[NotificationHttp].map(http)
}
implicit val jsonEncoder: Encoder[NotificationChannel] =
Encoder.instance {
case NotificationChannel.Mail(c) =>
Encoder[NotificationMail].apply(c)
case NotificationChannel.Gotify(c) =>
Encoder[NotificationGotify].apply(c)
case NotificationChannel.Matrix(c) =>
Encoder[NotificationMatrix].apply(c)
case NotificationChannel.Http(c) =>
Encoder[NotificationHttp].apply(c)
}
}

View File

@ -0,0 +1,33 @@
/*
* Copyright 2020 Eike K. & Contributors
*
* SPDX-License-Identifier: AGPL-3.0-or-later
*/
package docspell.restapi.model
import docspell.common._
import docspell.jsonminiq.JsonMiniQuery
import docspell.notification.api.{ChannelRef, EventType}
import docspell.restapi.codec.ChannelEitherCodec
import io.circe.{Decoder, Encoder}
// this must comply with the definition in openapi.yml under `extraSchemas`
final case class NotificationHook(
id: Ident,
enabled: Boolean,
channel: Either[ChannelRef, NotificationChannel],
allEvents: Boolean,
eventFilter: Option[JsonMiniQuery],
events: List[EventType]
)
object NotificationHook {
import ChannelEitherCodec._
implicit val jsonDecoder: Decoder[NotificationHook] =
io.circe.generic.semiauto.deriveDecoder
implicit val jsonEncoder: Encoder[NotificationHook] =
io.circe.generic.semiauto.deriveEncoder
}
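
Because the hook reuses that codec, a hook holding only a channel reference serializes back to the same compact shape the client sends. A small sketch (ids are placeholders):

  import docspell.common.Ident
  import docspell.notification.api.{ChannelRef, ChannelType}
  import docspell.restapi.model.NotificationHook
  import io.circe.syntax._

  val hook = NotificationHook(
    id = Ident.unsafe("hook1"),
    enabled = true,
    channel = Left(ChannelRef(Ident.unsafe("abcde"), ChannelType.Matrix)),
    allEvents = false,
    eventFilter = None,
    events = Nil
  )
  val json = hook.asJson // the channel field renders as a bare {"id": ..., "channelType": ...} reference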

View File

@ -0,0 +1,35 @@
/*
* Copyright 2020 Eike K. & Contributors
*
* SPDX-License-Identifier: AGPL-3.0-or-later
*/
package docspell.restapi.model
import docspell.common._
import docspell.restapi.model._
import com.github.eikek.calev.CalEvent
import com.github.eikek.calev.circe.CalevCirceCodec._
import io.circe.generic.semiauto
import io.circe.{Decoder, Encoder}
// this must comply with the definition in openapi.yml under `extraSchemas`
final case class PeriodicDueItemsSettings(
id: Ident,
enabled: Boolean,
summary: Option[String],
channel: NotificationChannel,
schedule: CalEvent,
remindDays: Int,
capOverdue: Boolean,
tagsInclude: List[Tag],
tagsExclude: List[Tag]
)
object PeriodicDueItemsSettings {
implicit val jsonDecoder: Decoder[PeriodicDueItemsSettings] =
semiauto.deriveDecoder[PeriodicDueItemsSettings]
implicit val jsonEncoder: Encoder[PeriodicDueItemsSettings] =
semiauto.deriveEncoder[PeriodicDueItemsSettings]
}

View File

@ -0,0 +1,35 @@
/*
* Copyright 2020 Eike K. & Contributors
*
* SPDX-License-Identifier: AGPL-3.0-or-later
*/
package docspell.restapi.model
import docspell.common._
import docspell.query.ItemQuery
import docspell.restapi.codec.ItemQueryJson._
import com.github.eikek.calev.CalEvent
import com.github.eikek.calev.circe.CalevCirceCodec._
import io.circe.generic.semiauto
import io.circe.{Decoder, Encoder}
// this must comply with the definition in openapi.yml under `extraSchemas`
final case class PeriodicQuerySettings(
id: Ident,
summary: Option[String],
enabled: Boolean,
channel: NotificationChannel,
query: ItemQuery,
schedule: CalEvent
)
object PeriodicQuerySettings {
implicit val jsonDecoder: Decoder[PeriodicQuerySettings] =
semiauto.deriveDecoder
implicit val jsonEncoder: Encoder[PeriodicQuerySettings] =
semiauto.deriveEncoder
}

View File

@ -0,0 +1,62 @@
/*
* Copyright 2020 Eike K. & Contributors
*
* SPDX-License-Identifier: AGPL-3.0-or-later
*/
package docspell.restapi.model
import docspell.common._
import docspell.notification.api.ChannelRef
import docspell.notification.api.ChannelType
import io.circe.Decoder
import io.circe.parser
import munit._
class NotificationCodecTest extends FunSuite {
def parse[A: Decoder](str: String): A =
parser.parse(str).fold(throw _, identity).as[A].fold(throw _, identity)
def id(str: String): Ident =
Ident.unsafe(str)
test("decode with channelref") {
val json = """{"id":"",
"enabled": true,
"channel": {"id":"abcde", "channelType":"matrix"},
"allEvents": false,
"events": ["TagsChanged", "SetFieldValue"]
}"""
val hook = parse[NotificationHook](json)
assertEquals(hook.enabled, true)
assertEquals(hook.channel, Left(ChannelRef(id("abcde"), ChannelType.Matrix)))
}
test("decode with gotify data") {
val json = """{"id":"",
"enabled": true,
"channel": {"id":"", "channelType":"gotify", "url":"http://test.gotify.com", "appKey": "abcde"},
"allEvents": false,
"eventFilter": null,
"events": ["TagsChanged", "SetFieldValue"]
}"""
val hook = parse[NotificationHook](json)
assertEquals(hook.enabled, true)
assertEquals(
hook.channel,
Right(
NotificationChannel.Gotify(
NotificationGotify(
id(""),
ChannelType.Gotify,
LenientUri.unsafe("http://test.gotify.com"),
Password("abcde")
)
)
)
)
}
}

View File

@ -9,7 +9,7 @@
<logger name="docspell" level="debug" />
<logger name="emil" level="debug"/>
<logger name="org.http4s.server.message-failures" level="debug"/>
<root level="INFO">
<appender-ref ref="STDOUT" />
</root>

View File

@ -6,11 +6,23 @@
package docspell.restserver
import fs2.Stream
import docspell.backend.BackendApp
trait RestApp[F[_]] {
/** Access to the configuration used to build backend services. */
def config: Config
/** Access to all backend services. */
def backend: BackendApp[F]
/** Stream consuming events (async) originating in this application. */
def eventConsume(maxConcurrent: Int): Stream[F, Nothing]
/** Stream consuming messages from topics (pubsub) and forwarding them to the frontend
* via websocket.
*/
def subscriptions: Stream[F, Nothing]
}

View File

@ -7,18 +7,36 @@
package docspell.restserver
import cats.effect._
import fs2.Stream
import fs2.concurrent.Topic
import docspell.backend.BackendApp
import docspell.common.Logger
import docspell.ftsclient.FtsClient
import docspell.ftssolr.SolrFtsClient
import docspell.notification.api.NotificationModule
import docspell.notification.impl.NotificationModuleImpl
import docspell.pubsub.api.{PubSub, PubSubT}
import docspell.restserver.ws.OutputEvent
import docspell.store.Store
import emil.javamail.JavaMailEmil
import org.http4s.client.Client
final class RestAppImpl[F[_]](val config: Config, val backend: BackendApp[F])
extends RestApp[F] {}
final class RestAppImpl[F[_]: Async](
val config: Config,
val backend: BackendApp[F],
notificationMod: NotificationModule[F],
wsTopic: Topic[F, OutputEvent],
pubSub: PubSubT[F]
) extends RestApp[F] {
def eventConsume(maxConcurrent: Int): Stream[F, Nothing] =
notificationMod.consumeAllEvents(maxConcurrent)
def subscriptions: Stream[F, Nothing] =
Subscriptions[F](wsTopic, pubSub)
}
object RestAppImpl {
@ -26,14 +44,21 @@ object RestAppImpl {
cfg: Config,
store: Store[F],
httpClient: Client[F],
pubSub: PubSub[F]
pubSub: PubSub[F],
wsTopic: Topic[F, OutputEvent]
): Resource[F, RestApp[F]] = {
val logger = Logger.log4s(org.log4s.getLogger(s"restserver-${cfg.appId.id}"))
for {
ftsClient <- createFtsClient(cfg)(httpClient)
pubSubT = PubSubT(pubSub, logger)
backend <- BackendApp.create[F](cfg.backend, store, ftsClient, pubSubT)
app = new RestAppImpl[F](cfg, backend)
javaEmil = JavaMailEmil(cfg.backend.mailSettings)
notificationMod <- Resource.eval(
NotificationModuleImpl[F](store, javaEmil, httpClient, 200)
)
backend <- BackendApp
.create[F](store, javaEmil, ftsClient, pubSubT, notificationMod)
app = new RestAppImpl[F](cfg, backend, notificationMod, wsTopic, pubSubT)
} yield app
}

View File

@ -50,10 +50,11 @@ object RestServer {
server =
Stream
.resource(createApp(cfg, pools))
.resource(createApp(cfg, pools, wsTopic))
.flatMap { case (restApp, pubSub, httpClient, setting) =>
Stream(
Subscriptions(wsTopic, restApp.backend.pubSub),
restApp.subscriptions,
restApp.eventConsume(2),
BlazeServerBuilder[F]
.bindHttp(cfg.bind.port, cfg.bind.address)
.withoutBanner
@ -71,8 +72,12 @@ object RestServer {
def createApp[F[_]: Async](
cfg: Config,
pools: Pools
): Resource[F, (RestApp[F], NaivePubSub[F], Client[F], RInternalSetting)] =
pools: Pools,
wsTopic: Topic[F, OutputEvent]
): Resource[
F,
(RestApp[F], NaivePubSub[F], Client[F], RInternalSetting)
] =
for {
httpClient <- BlazeClientBuilder[F].resource
store <- Store.create[F](
@ -86,7 +91,7 @@ object RestServer {
store,
httpClient
)(Topics.all.map(_.topic))
restApp <- RestAppImpl.create[F](cfg, store, httpClient, pubSub)
restApp <- RestAppImpl.create[F](cfg, store, httpClient, pubSub, wsTopic)
} yield (restApp, pubSub, httpClient, setting)
def createHttpApp[F[_]: Async](
@ -150,7 +155,7 @@ object RestServer {
"collective" -> CollectiveRoutes(restApp.backend, token),
"queue" -> JobQueueRoutes(restApp.backend, token),
"item" -> ItemRoutes(cfg, restApp.backend, token),
"items" -> ItemMultiRoutes(restApp.backend, token),
"items" -> ItemMultiRoutes(cfg, restApp.backend, token),
"attachment" -> AttachmentRoutes(restApp.backend, token),
"attachments" -> AttachmentMultiRoutes(restApp.backend, token),
"upload" -> UploadRoutes.secured(restApp.backend, cfg, token),
@ -161,11 +166,13 @@ object RestServer {
"share" -> ShareRoutes.manage(restApp.backend, token),
"usertask/notifydueitems" -> NotifyDueItemsRoutes(cfg, restApp.backend, token),
"usertask/scanmailbox" -> ScanMailboxRoutes(restApp.backend, token),
"usertask/periodicquery" -> PeriodicQueryRoutes(cfg, restApp.backend, token),
"calevent/check" -> CalEventCheckRoutes(),
"fts" -> FullTextIndexRoutes.secured(cfg, restApp.backend, token),
"folder" -> FolderRoutes(restApp.backend, token),
"customfield" -> CustomFieldRoutes(restApp.backend, token),
"clientSettings" -> ClientSettingsRoutes(restApp.backend, token)
"clientSettings" -> ClientSettingsRoutes(restApp.backend, token),
"notification" -> NotificationRoutes(cfg, restApp.backend, token)
)
def openRoutes[F[_]: Async](

View File

@ -14,7 +14,9 @@ import docspell.backend.auth.AuthToken
import docspell.backend.ops.OCustomFields.{RemoveValue, SetValue}
import docspell.common._
import docspell.restapi.model._
import docspell.restserver.Config
import docspell.restserver.conv.{Conversions, MultiIdSupport}
import docspell.restserver.http4s.ClientRequestInfo
import org.http4s.HttpRoutes
import org.http4s.circe.CirceEntityDecoder._
@ -26,6 +28,7 @@ object ItemMultiRoutes extends MultiIdSupport {
private[this] val log4sLogger = getLogger
def apply[F[_]: Async](
cfg: Config,
backend: BackendApp[F],
user: AuthToken
): HttpRoutes[F] = {
@ -66,7 +69,9 @@ object ItemMultiRoutes extends MultiIdSupport {
json.refs,
user.account.collective
)
resp <- Ok(Conversions.basicResult(res, "Tags updated"))
baseUrl = ClientRequestInfo.getBaseUrl(cfg, req)
_ <- backend.notification.offerEvents(res.event(user.account, baseUrl.some))
resp <- Ok(Conversions.basicResult(res.value, "Tags updated"))
} yield resp
case req @ POST -> Root / "tags" =>
@ -78,7 +83,9 @@ object ItemMultiRoutes extends MultiIdSupport {
json.refs,
user.account.collective
)
resp <- Ok(Conversions.basicResult(res, "Tags added."))
baseUrl = ClientRequestInfo.getBaseUrl(cfg, req)
_ <- backend.notification.offerEvents(res.event(user.account, baseUrl.some))
resp <- Ok(Conversions.basicResult(res.value, "Tags added."))
} yield resp
case req @ POST -> Root / "tagsremove" =>
@ -90,7 +97,9 @@ object ItemMultiRoutes extends MultiIdSupport {
json.refs,
user.account.collective
)
resp <- Ok(Conversions.basicResult(res, "Tags removed"))
baseUrl = ClientRequestInfo.getBaseUrl(cfg, req)
_ <- backend.notification.offerEvents(res.event(user.account, baseUrl.some))
resp <- Ok(Conversions.basicResult(res.value, "Tags removed"))
} yield resp
case req @ PUT -> Root / "name" =>
@ -205,7 +214,9 @@ object ItemMultiRoutes extends MultiIdSupport {
items,
SetValue(json.field.field, json.field.value, user.account.collective)
)
resp <- Ok(Conversions.basicResult(res))
baseUrl = ClientRequestInfo.getBaseUrl(cfg, req)
_ <- backend.notification.offerEvents(res.event(user.account, baseUrl.some))
resp <- Ok(Conversions.basicResult(res.value))
} yield resp
case req @ POST -> Root / "customfieldremove" =>
@ -216,7 +227,9 @@ object ItemMultiRoutes extends MultiIdSupport {
res <- backend.customFields.deleteValue(
RemoveValue(field, items, user.account.collective)
)
resp <- Ok(Conversions.basicResult(res, "Custom fields removed."))
baseUrl = ClientRequestInfo.getBaseUrl(cfg, req)
_ <- backend.notification.offerEvents(res.event(user.account, baseUrl.some))
resp <- Ok(Conversions.basicResult(res.value, "Custom fields removed."))
} yield resp
case req @ POST -> Root / "merge" =>

View File

@ -25,6 +25,7 @@ import docspell.restapi.model._
import docspell.restserver.Config
import docspell.restserver.conv.Conversions
import docspell.restserver.http4s.BinaryUtil
import docspell.restserver.http4s.ClientRequestInfo
import docspell.restserver.http4s.Responses
import docspell.restserver.http4s.{QueryParam => QP}
@ -160,29 +161,37 @@ object ItemRoutes {
for {
tags <- req.as[StringList].map(_.items)
res <- backend.item.setTags(id, tags, user.account.collective)
resp <- Ok(Conversions.basicResult(res, "Tags updated"))
baseUrl = ClientRequestInfo.getBaseUrl(cfg, req)
_ <- backend.notification.offerEvents(res.event(user.account, baseUrl.some))
resp <- Ok(Conversions.basicResult(res.value, "Tags updated"))
} yield resp
case req @ POST -> Root / Ident(id) / "tags" =>
for {
data <- req.as[Tag]
rtag <- Conversions.newTag(data, user.account.collective)
res <- backend.item.addNewTag(id, rtag)
resp <- Ok(Conversions.basicResult(res, "Tag added."))
res <- backend.item.addNewTag(user.account.collective, id, rtag)
baseUrl = ClientRequestInfo.getBaseUrl(cfg, req)
_ <- backend.notification.offerEvents(res.event(user.account, baseUrl.some))
resp <- Ok(Conversions.basicResult(res.value, "Tag added."))
} yield resp
case req @ PUT -> Root / Ident(id) / "taglink" =>
for {
tags <- req.as[StringList]
res <- backend.item.linkTags(id, tags.items, user.account.collective)
resp <- Ok(Conversions.basicResult(res, "Tags linked"))
baseUrl = ClientRequestInfo.getBaseUrl(cfg, req)
_ <- backend.notification.offerEvents(res.event(user.account, baseUrl.some))
resp <- Ok(Conversions.basicResult(res.value, "Tags linked"))
} yield resp
case req @ POST -> Root / Ident(id) / "tagtoggle" =>
for {
tags <- req.as[StringList]
res <- backend.item.toggleTags(id, tags.items, user.account.collective)
resp <- Ok(Conversions.basicResult(res, "Tags linked"))
baseUrl = ClientRequestInfo.getBaseUrl(cfg, req)
_ <- backend.notification.offerEvents(res.event(user.account, baseUrl.some))
resp <- Ok(Conversions.basicResult(res.value, "Tags linked"))
} yield resp
case req @ POST -> Root / Ident(id) / "tagsremove" =>
@ -193,7 +202,9 @@ object ItemRoutes {
json.items,
user.account.collective
)
resp <- Ok(Conversions.basicResult(res, "Tags removed"))
baseUrl = ClientRequestInfo.getBaseUrl(cfg, req)
_ <- backend.notification.offerEvents(res.event(user.account, baseUrl.some))
resp <- Ok(Conversions.basicResult(res.value, "Tags removed"))
} yield resp
case req @ PUT -> Root / Ident(id) / "direction" =>
@ -392,15 +403,19 @@ object ItemRoutes {
id,
SetValue(data.field, data.value, user.account.collective)
)
resp <- Ok(Conversions.basicResult(res))
baseUrl = ClientRequestInfo.getBaseUrl(cfg, req)
_ <- backend.notification.offerEvents(res.event(user.account, baseUrl.some))
resp <- Ok(Conversions.basicResult(res.value))
} yield resp
case DELETE -> Root / Ident(id) / "customfield" / Ident(fieldId) =>
case req @ DELETE -> Root / Ident(id) / "customfield" / Ident(fieldId) =>
for {
res <- backend.customFields.deleteValue(
RemoveValue(fieldId, NonEmptyList.of(id), user.account.collective)
)
resp <- Ok(Conversions.basicResult(res, "Custom field value removed."))
baseUrl = ClientRequestInfo.getBaseUrl(cfg, req)
_ <- backend.notification.offerEvents(res.event(user.account, baseUrl.some))
resp <- Ok(Conversions.basicResult(res.value, "Custom field value removed."))
} yield resp
case DELETE -> Root / Ident(id) =>

View File

@ -0,0 +1,207 @@
/*
* Copyright 2020 Eike K. & Contributors
*
* SPDX-License-Identifier: AGPL-3.0-or-later
*/
package docspell.restserver.routes
import cats.effect._
import cats.implicits._
import docspell.backend.BackendApp
import docspell.backend.auth.AuthToken
import docspell.common._
import docspell.joexapi.model.BasicResult
import docspell.jsonminiq.JsonMiniQuery
import docspell.notification.api.EventType
import docspell.restapi.model._
import docspell.restserver.Config
import docspell.restserver.conv.Conversions
import docspell.restserver.http4s.ClientRequestInfo
import org.http4s._
import org.http4s.circe.CirceEntityDecoder._
import org.http4s.circe.CirceEntityEncoder._
import org.http4s.dsl.Http4sDsl
import org.http4s.server.Router
object NotificationRoutes {
def apply[F[_]: Async](
cfg: Config,
backend: BackendApp[F],
user: AuthToken
): HttpRoutes[F] =
Router(
"channel" -> channels(backend, user),
"hook" -> hooks(cfg, backend, user),
"event" -> events(cfg, backend, user)
)
def channels[F[_]: Async](
backend: BackendApp[F],
user: AuthToken
): HttpRoutes[F] = {
val dsl = new Http4sDsl[F] {}
import dsl._
HttpRoutes.of {
case GET -> Root =>
for {
list <- backend.notification.listChannels(user.account)
data = list.map(NotificationChannel.convert)
resp <- Ok(data)
} yield resp
case DELETE -> Root / Ident(id) =>
for {
res <- backend.notification.deleteChannel(id, user.account)
resp <- Ok(Conversions.basicResult(res, "Channel deleted"))
} yield resp
case req @ POST -> Root =>
for {
input <- req.as[NotificationChannel]
ch <- Sync[F].pure(NotificationChannel.convert(input)).rethrow
res <- backend.notification.createChannel(ch, user.account)
resp <- Ok(Conversions.basicResult(res, "Channel created"))
} yield resp
case req @ PUT -> Root =>
for {
input <- req.as[NotificationChannel]
ch <- Sync[F].pure(NotificationChannel.convert(input)).rethrow
res <- backend.notification.updateChannel(ch, user.account)
resp <- Ok(Conversions.basicResult(res, "Channel created"))
} yield resp
}
}
def hooks[F[_]: Async](
cfg: Config,
backend: BackendApp[F],
user: AuthToken
): HttpRoutes[F] = {
val dsl = new Http4sDsl[F] {}
import dsl._
HttpRoutes.of {
case GET -> Root =>
for {
list <- backend.notification.listHooks(user.account)
data = list.map(Converters.convertHook)
resp <- Ok(data)
} yield resp
case DELETE -> Root / Ident(id) =>
for {
res <- backend.notification.deleteHook(id, user.account)
resp <- Ok(Conversions.basicResult(res, "Hook deleted."))
} yield resp
case req @ POST -> Root =>
for {
input <- req.as[NotificationHook]
hook <- Sync[F].pure(Converters.convertHook(input)).rethrow
res <- backend.notification.createHook(hook, user.account)
resp <- Ok(Conversions.basicResult(res, "Hook created"))
} yield resp
case req @ PUT -> Root =>
for {
input <- req.as[NotificationHook]
hook <- Sync[F].pure(Converters.convertHook(input)).rethrow
res <- backend.notification.updateHook(hook, user.account)
resp <- Ok(Conversions.basicResult(res, "Hook updated"))
} yield resp
case req @ POST -> Root / "verifyJsonFilter" =>
for {
input <- req.as[StringValue]
res = JsonMiniQuery.parse(input.value)
resp <- Ok(BasicResult(res.isRight, res.fold(identity, _.unsafeAsString)))
} yield resp
case req @ POST -> Root / "sendTestEvent" =>
for {
input <- req.as[NotificationHook]
ch <- Sync[F]
.pure(
input.channel.left
.map(_ => new Exception(s"ChannelRefs not allowed for testing"))
.flatMap(NotificationChannel.convert)
)
.rethrow
baseUrl = ClientRequestInfo.getBaseUrl(cfg, req)
res <- backend.notification.sendSampleEvent(
input.events.headOption.getOrElse(EventType.all.head),
ch,
user.account,
baseUrl.some
)
resp <- Ok(NotificationChannelTestResult(res.success, res.logMessages.toList))
} yield resp
}
}
def events[F[_]: Async](
cfg: Config,
backend: BackendApp[F],
user: AuthToken
): HttpRoutes[F] = {
val dsl = new Http4sDsl[F] {}
import dsl._
HttpRoutes.of { case req @ POST -> Root / "sample" =>
for {
input <- req.as[NotificationSampleEventReq]
baseUrl = ClientRequestInfo.getBaseUrl(cfg, req)
data <- backend.notification.sampleEvent(
input.eventType,
user.account,
baseUrl.some
)
resp <- Ok(data.asJsonWithMessage)
} yield resp
}
}
object Converters {
import docspell.backend.ops.ONotification
def convertHook(h: ONotification.Hook): NotificationHook =
NotificationHook(
h.id,
h.enabled,
h.channel.map(NotificationChannel.convert),
h.allEvents,
h.eventFilter,
h.events
)
def convertHook(h: NotificationHook): Either[Throwable, ONotification.Hook] =
h.channel match {
case Left(cref) =>
Right(
ONotification.Hook(
h.id,
h.enabled,
Left(cref),
h.allEvents,
h.eventFilter,
h.events
)
)
case Right(channel) =>
NotificationChannel
.convert(channel)
.map(ch =>
ONotification
.Hook(h.id, h.enabled, Right(ch), h.allEvents, h.eventFilter, h.events)
)
}
}
}

View File

@ -11,8 +11,10 @@ import cats.effect._
import cats.implicits._
import docspell.backend.BackendApp
import docspell.backend.MailAddressCodec
import docspell.backend.auth.AuthToken
import docspell.common._
import docspell.notification.api.PeriodicDueItemsArgs
import docspell.restapi.model._
import docspell.restserver.Config
import docspell.restserver.conv.Conversions
@ -24,7 +26,7 @@ import org.http4s.circe.CirceEntityDecoder._
import org.http4s.circe.CirceEntityEncoder._
import org.http4s.dsl.Http4sDsl
object NotifyDueItemsRoutes {
object NotifyDueItemsRoutes extends MailAddressCodec {
def apply[F[_]: Async](
cfg: Config,
@ -39,13 +41,13 @@ object NotifyDueItemsRoutes {
case GET -> Root / Ident(id) =>
(for {
task <- ut.findNotifyDueItems(id, UserTaskScope(user.account))
res <- OptionT.liftF(taskToSettings(user.account, backend, task))
res <- OptionT.liftF(taskToSettings(backend, task))
resp <- OptionT.liftF(Ok(res))
} yield resp).getOrElseF(NotFound())
case req @ POST -> Root / "startonce" =>
for {
data <- req.as[NotificationSettings]
data <- req.as[PeriodicDueItemsSettings]
newId <- Ident.randomId[F]
task <- makeTask(newId, getBaseUrl(cfg, req), user.account, data)
res <-
@ -65,7 +67,7 @@ object NotifyDueItemsRoutes {
} yield resp
case req @ PUT -> Root =>
def run(data: NotificationSettings) =
def run(data: PeriodicDueItemsSettings) =
for {
task <- makeTask(data.id, getBaseUrl(cfg, req), user.account, data)
res <-
@ -75,7 +77,7 @@ object NotifyDueItemsRoutes {
resp <- Ok(res)
} yield resp
for {
data <- req.as[NotificationSettings]
data <- req.as[PeriodicDueItemsSettings]
resp <-
if (data.id.isEmpty) Ok(BasicResult(false, "Empty id is not allowed"))
else run(data)
@ -83,7 +85,7 @@ object NotifyDueItemsRoutes {
case req @ POST -> Root =>
for {
data <- req.as[NotificationSettings]
data <- req.as[PeriodicDueItemsSettings]
newId <- Ident.randomId[F]
task <- makeTask(newId, getBaseUrl(cfg, req), user.account, data)
res <-
@ -95,10 +97,9 @@ object NotifyDueItemsRoutes {
case GET -> Root =>
ut.getNotifyDueItems(UserTaskScope(user.account))
.evalMap(task => taskToSettings(user.account, backend, task))
.evalMap(task => taskToSettings(backend, task))
.compile
.toVector
.map(v => NotificationSettingsList(v.toList))
.flatMap(Ok(_))
}
}
@ -110,50 +111,49 @@ object NotifyDueItemsRoutes {
id: Ident,
baseUrl: LenientUri,
user: AccountId,
settings: NotificationSettings
): F[UserTask[NotifyDueItemsArgs]] =
Sync[F].pure(
settings: PeriodicDueItemsSettings
): F[UserTask[PeriodicDueItemsArgs]] =
Sync[F].pure(NotificationChannel.convert(settings.channel)).rethrow.map { channel =>
UserTask(
id,
NotifyDueItemsArgs.taskName,
PeriodicDueItemsArgs.taskName,
settings.enabled,
settings.schedule,
settings.summary,
NotifyDueItemsArgs(
PeriodicDueItemsArgs(
user,
settings.smtpConnection,
settings.recipients,
Some(baseUrl / "app" / "item"),
Right(channel),
settings.remindDays,
if (settings.capOverdue) Some(settings.remindDays)
else None,
settings.tagsInclude.map(_.id),
settings.tagsExclude.map(_.id)
settings.tagsExclude.map(_.id),
Some(baseUrl / "app" / "item")
)
)
)
}
def taskToSettings[F[_]: Sync](
account: AccountId,
backend: BackendApp[F],
task: UserTask[NotifyDueItemsArgs]
): F[NotificationSettings] =
task: UserTask[PeriodicDueItemsArgs]
): F[PeriodicDueItemsSettings] =
for {
tinc <- backend.tag.loadAll(task.args.tagsInclude)
texc <- backend.tag.loadAll(task.args.tagsExclude)
conn <-
backend.mail
.getSmtpSettings(account, None)
.map(
_.find(_.name == task.args.smtpConnection)
.map(_.name)
ch <- task.args.channel match {
case Right(c) => NotificationChannel.convert(c).pure[F]
case Left(ref) =>
Sync[F].raiseError(
new IllegalStateException(s"ChannelRefs are not supported: $ref")
)
} yield NotificationSettings(
}
} yield PeriodicDueItemsSettings(
task.id,
task.enabled,
task.summary,
conn.getOrElse(Ident.unsafe("")),
task.args.recipients,
ch,
task.timer,
task.args.remindDays,
task.args.daysBack.isDefined,

View File

@ -0,0 +1,161 @@
/*
* Copyright 2020 Eike K. & Contributors
*
* SPDX-License-Identifier: AGPL-3.0-or-later
*/
package docspell.restserver.routes
import cats.data.OptionT
import cats.effect._
import cats.implicits._
import docspell.backend.BackendApp
import docspell.backend.MailAddressCodec
import docspell.backend.auth.AuthToken
import docspell.common._
import docspell.notification.api.PeriodicQueryArgs
import docspell.query.ItemQueryParser
import docspell.restapi.model._
import docspell.restserver.Config
import docspell.restserver.conv.Conversions
import docspell.restserver.http4s.ClientRequestInfo
import docspell.store.usertask._
import org.http4s._
import org.http4s.circe.CirceEntityDecoder._
import org.http4s.circe.CirceEntityEncoder._
import org.http4s.dsl.Http4sDsl
object PeriodicQueryRoutes extends MailAddressCodec {
def apply[F[_]: Async](
cfg: Config,
backend: BackendApp[F],
user: AuthToken
): HttpRoutes[F] = {
val dsl = new Http4sDsl[F] {}
val ut = backend.userTask
import dsl._
HttpRoutes.of {
case GET -> Root / Ident(id) =>
(for {
task <- ut.findPeriodicQuery(id, UserTaskScope(user.account))
res <- OptionT.liftF(taskToSettings(task))
resp <- OptionT.liftF(Ok(res))
} yield resp).getOrElseF(NotFound())
case req @ POST -> Root / "startonce" =>
for {
data <- req.as[PeriodicQuerySettings]
newId <- Ident.randomId[F]
task <- makeTask(newId, getBaseUrl(cfg, req), user.account, data)
res <-
ut.executeNow(UserTaskScope(user.account), None, task)
.attempt
.map(Conversions.basicResult(_, "Submitted successfully."))
resp <- Ok(res)
} yield resp
case DELETE -> Root / Ident(id) =>
for {
res <-
ut.deleteTask(UserTaskScope(user.account), id)
.attempt
.map(Conversions.basicResult(_, "Deleted successfully"))
resp <- Ok(res)
} yield resp
case req @ PUT -> Root =>
def run(data: PeriodicQuerySettings) =
for {
task <- makeTask(data.id, getBaseUrl(cfg, req), user.account, data)
res <-
ut.submitPeriodicQuery(UserTaskScope(user.account), None, task)
.attempt
.map(Conversions.basicResult(_, "Saved successfully"))
resp <- Ok(res)
} yield resp
for {
data <- req.as[PeriodicQuerySettings]
resp <-
if (data.id.isEmpty) Ok(BasicResult(false, "Empty id is not allowed"))
else run(data)
} yield resp
case req @ POST -> Root =>
for {
data <- req.as[PeriodicQuerySettings]
newId <- Ident.randomId[F]
task <- makeTask(newId, getBaseUrl(cfg, req), user.account, data)
res <-
ut.submitPeriodicQuery(UserTaskScope(user.account), None, task)
.attempt
.map(Conversions.basicResult(_, "Saved successfully."))
resp <- Ok(res)
} yield resp
case GET -> Root =>
ut.getPeriodicQuery(UserTaskScope(user.account))
.evalMap(task => taskToSettings(task))
.compile
.toVector
.flatMap(Ok(_))
}
}
private def getBaseUrl[F[_]](cfg: Config, req: Request[F]) =
ClientRequestInfo.getBaseUrl(cfg, req)
def makeTask[F[_]: Sync](
id: Ident,
baseUrl: LenientUri,
user: AccountId,
settings: PeriodicQuerySettings
): F[UserTask[PeriodicQueryArgs]] =
Sync[F]
.pure(for {
ch <- NotificationChannel.convert(settings.channel)
qstr <- ItemQueryParser
.asString(settings.query.expr)
.left
.map(err => new IllegalArgumentException(s"Query not renderable: $err"))
} yield (ch, ItemQueryString(qstr)))
.rethrow
.map { case (channel, qstr) =>
UserTask(
id,
PeriodicQueryArgs.taskName,
settings.enabled,
settings.schedule,
settings.summary,
PeriodicQueryArgs(
user,
Right(channel),
qstr,
Some(baseUrl / "app" / "item")
)
)
}
def taskToSettings[F[_]: Sync](
task: UserTask[PeriodicQueryArgs]
): F[PeriodicQuerySettings] =
for {
ch <- task.args.channel match {
case Right(c) => NotificationChannel.convert(c).pure[F]
case Left(ref) =>
Sync[F].raiseError(
new IllegalStateException(s"ChannelRefs are not supported: $ref")
)
}
} yield PeriodicQuerySettings(
task.id,
task.summary,
task.enabled,
ch,
ItemQueryParser.parseUnsafe(task.args.query.query),
task.timer
)
}

View File

@ -57,7 +57,6 @@ object OutputEvent {
private case class Msg[A](tag: String, content: A)
private object Msg {
@scala.annotation.nowarn
implicit def jsonEncoder[A: Encoder]: Encoder[Msg[A]] =
deriveEncoder
}

View File

@ -0,0 +1,62 @@
create table "notification_channel_mail" (
"id" varchar(254) not null primary key,
"uid" varchar(254) not null,
"conn_id" varchar(254) not null,
"recipients" varchar(254) not null,
"created" timestamp not null,
foreign key ("uid") references "user_"("uid") on delete cascade,
foreign key ("conn_id") references "useremail"("id") on delete cascade
);
create table "notification_channel_gotify" (
"id" varchar(254) not null primary key,
"uid" varchar(254) not null,
"url" varchar(254) not null,
"app_key" varchar(254) not null,
"created" timestamp not null,
foreign key ("uid") references "user_"("uid") on delete cascade
);
create table "notification_channel_matrix" (
"id" varchar(254) not null primary key,
"uid" varchar(254) not null,
"home_server" varchar(254) not null,
"room_id" varchar(254) not null,
"access_token" varchar not null,
"message_type" varchar(254) not null,
"created" timestamp not null,
foreign key ("uid") references "user_"("uid") on delete cascade
);
create table "notification_channel_http" (
"id" varchar(254) not null primary key,
"uid" varchar(254) not null,
"url" varchar(254) not null,
"created" timestamp not null,
foreign key ("uid") references "user_"("uid") on delete cascade
);
create table "notification_hook" (
"id" varchar(254) not null primary key,
"uid" varchar(254) not null,
"enabled" boolean not null,
"channel_mail" varchar(254),
"channel_gotify" varchar(254),
"channel_matrix" varchar(254),
"channel_http" varchar(254),
"all_events" boolean not null,
"event_filter" varchar(500),
"created" timestamp not null,
foreign key ("uid") references "user_"("uid") on delete cascade,
foreign key ("channel_mail") references "notification_channel_mail"("id") on delete cascade,
foreign key ("channel_gotify") references "notification_channel_gotify"("id") on delete cascade,
foreign key ("channel_matrix") references "notification_channel_matrix"("id") on delete cascade,
foreign key ("channel_http") references "notification_channel_http"("id") on delete cascade
);
create table "notification_hook_event" (
"id" varchar(254) not null primary key,
"hook_id" varchar(254) not null,
"event_type" varchar(254) not null,
foreign key ("hook_id") references "notification_hook"("id") on delete cascade
);

View File

@ -0,0 +1,62 @@
create table `notification_channel_mail` (
`id` varchar(254) not null primary key,
`uid` varchar(254) not null,
`conn_id` varchar(254) not null,
`recipients` varchar(254) not null,
`created` timestamp not null,
foreign key (`uid`) references `user_`(`uid`) on delete cascade,
foreign key (`conn_id`) references `useremail`(`id`) on delete cascade
);
create table `notification_channel_gotify` (
`id` varchar(254) not null primary key,
`uid` varchar(254) not null,
`url` varchar(254) not null,
`app_key` varchar(254) not null,
`created` timestamp not null,
foreign key (`uid`) references `user_`(`uid`) on delete cascade
);
create table `notification_channel_matrix` (
`id` varchar(254) not null primary key,
`uid` varchar(254) not null,
`home_server` varchar(254) not null,
`room_id` varchar(254) not null,
`access_token` text not null,
`message_type` varchar(254) not null,
`created` timestamp not null,
foreign key (`uid`) references `user_`(`uid`) on delete cascade
);
create table `notification_channel_http` (
`id` varchar(254) not null primary key,
`uid` varchar(254) not null,
`url` varchar(254) not null,
`created` timestamp not null,
foreign key (`uid`) references `user_`(`uid`) on delete cascade
);
create table `notification_hook` (
`id` varchar(254) not null primary key,
`uid` varchar(254) not null,
`enabled` boolean not null,
`channel_mail` varchar(254),
`channel_gotify` varchar(254),
`channel_matrix` varchar(254),
`channel_http` varchar(254),
`all_events` boolean not null,
`event_filter` varchar(500),
`created` timestamp not null,
foreign key (`uid`) references `user_`(`uid`) on delete cascade,
foreign key (`channel_mail`) references `notification_channel_mail`(`id`) on delete cascade,
foreign key (`channel_gotify`) references `notification_channel_gotify`(`id`) on delete cascade,
foreign key (`channel_matrix`) references `notification_channel_matrix`(`id`) on delete cascade,
foreign key (`channel_http`) references `notification_channel_http`(`id`) on delete cascade
);
create table `notification_hook_event` (
`id` varchar(254) not null primary key,
`hook_id` varchar(254) not null,
`event_type` varchar(254) not null,
foreign key (`hook_id`) references `notification_hook`(`id`) on delete cascade
);

View File

@ -0,0 +1,62 @@
create table "notification_channel_mail" (
"id" varchar(254) not null primary key,
"uid" varchar(254) not null,
"conn_id" varchar(254) not null,
"recipients" varchar(254) not null,
"created" timestamp not null,
foreign key ("uid") references "user_"("uid") on delete cascade,
foreign key ("conn_id") references "useremail"("id") on delete cascade
);
create table "notification_channel_gotify" (
"id" varchar(254) not null primary key,
"uid" varchar(254) not null,
"url" varchar(254) not null,
"app_key" varchar(254) not null,
"created" timestamp not null,
foreign key ("uid") references "user_"("uid") on delete cascade
);
create table "notification_channel_matrix" (
"id" varchar(254) not null primary key,
"uid" varchar(254) not null,
"home_server" varchar(254) not null,
"room_id" varchar(254) not null,
"access_token" varchar not null,
"message_type" varchar(254) not null,
"created" timestamp not null,
foreign key ("uid") references "user_"("uid") on delete cascade
);
create table "notification_channel_http" (
"id" varchar(254) not null primary key,
"uid" varchar(254) not null,
"url" varchar(254) not null,
"created" timestamp not null,
foreign key ("uid") references "user_"("uid") on delete cascade
);
create table "notification_hook" (
"id" varchar(254) not null primary key,
"uid" varchar(254) not null,
"enabled" boolean not null,
"channel_mail" varchar(254),
"channel_gotify" varchar(254),
"channel_matrix" varchar(254),
"channel_http" varchar(254),
"all_events" boolean not null,
"event_filter" varchar(500),
"created" timestamp not null,
foreign key ("uid") references "user_"("uid") on delete cascade,
foreign key ("channel_mail") references "notification_channel_mail"("id") on delete cascade,
foreign key ("channel_gotify") references "notification_channel_gotify"("id") on delete cascade,
foreign key ("channel_matrix") references "notification_channel_matrix"("id") on delete cascade,
foreign key ("channel_http") references "notification_channel_http"("id") on delete cascade
);
create table "notification_hook_event" (
"id" varchar(254) not null primary key,
"hook_id" varchar(254) not null,
"event_type" varchar(254) not null,
foreign key ("hook_id") references "notification_hook"("id") on delete cascade
);

View File

@ -0,0 +1,87 @@
/*
* Copyright 2020 Eike K. & Contributors
*
* SPDX-License-Identifier: AGPL-3.0-or-later
*/
package db.migration
import cats.data.NonEmptyList
import cats.effect.{IO, Sync}
import cats.implicits._
import docspell.common._
import docspell.common.syntax.StringSyntax._
import docspell.notification.api.Channel
import docspell.notification.api.PeriodicDueItemsArgs
import docspell.store.records.RPeriodicTask
import doobie._
import doobie.implicits._
import doobie.util.transactor.Strategy
import emil.MailAddress
import emil.javamail.syntax._
import io.circe.Encoder
import io.circe.syntax._
import org.flywaydb.core.api.migration.Context
trait MigrationTasks {
def logger: org.log4s.Logger
implicit val jsonEncoder: Encoder[MailAddress] =
Encoder.encodeString.contramap(_.asUnicodeString)
def migrateDueItemTasks: ConnectionIO[Unit] =
for {
tasks <- RPeriodicTask.findByTask(NotifyDueItemsArgs.taskName)
_ <- Sync[ConnectionIO].delay(
logger.info(s"Starting to migrate ${tasks.size} user tasks")
)
_ <- tasks.traverse(migrateDueItemTask1)
_ <- RPeriodicTask.setEnabledByTask(NotifyDueItemsArgs.taskName, false)
} yield ()
def migrateDueItemTask1(old: RPeriodicTask): ConnectionIO[Int] = {
val converted = old.args
.parseJsonAs[NotifyDueItemsArgs]
.leftMap(_.getMessage())
.flatMap(convertArgs)
converted match {
case Right(args) =>
Sync[ConnectionIO].delay(logger.info(s"Converting user task: $old")) *>
RPeriodicTask.updateTask(
old.id,
PeriodicDueItemsArgs.taskName,
args.asJson.noSpaces
)
case Left(err) =>
Sync[ConnectionIO]
.delay(logger.error(s"Error converting user task: $old. $err"))
.as(0)
}
}
def convertArgs(old: NotifyDueItemsArgs): Either[String, PeriodicDueItemsArgs] =
old.recipients
.traverse(MailAddress.parse)
.flatMap(l => NonEmptyList.fromList(l).toRight("No recipients provided"))
.map { rec =>
PeriodicDueItemsArgs(
old.account,
Right(Channel.Mail(Ident.unsafe(""), old.smtpConnection, rec)),
old.remindDays,
old.daysBack,
old.tagsInclude,
old.tagsExclude,
old.itemDetailUrl
)
}
def mkTransactor(ctx: Context): Transactor[IO] = {
val xa = Transactor.fromConnection[IO](ctx.getConnection())
Transactor.strategy.set(xa, Strategy.void) // transactions are handled by Flyway
}
}

View File

@ -0,0 +1,23 @@
/*
* Copyright 2020 Eike K. & Contributors
*
* SPDX-License-Identifier: AGPL-3.0-or-later
*/
package db.migration.h2
import cats.effect.unsafe.implicits._
import db.migration.MigrationTasks
import doobie.implicits._
import org.flywaydb.core.api.migration.BaseJavaMigration
import org.flywaydb.core.api.migration.Context
class V1_29_2__MigrateNotifyTask extends BaseJavaMigration with MigrationTasks {
val logger = org.log4s.getLogger
override def migrate(ctx: Context): Unit = {
val xa = mkTransactor(ctx)
migrateDueItemTasks.transact(xa).unsafeRunSync()
}
}

View File

@ -0,0 +1,23 @@
/*
* Copyright 2020 Eike K. & Contributors
*
* SPDX-License-Identifier: AGPL-3.0-or-later
*/
package db.migration.mariadb
import cats.effect.unsafe.implicits._
import db.migration.MigrationTasks
import doobie.implicits._
import org.flywaydb.core.api.migration.BaseJavaMigration
import org.flywaydb.core.api.migration.Context
class V1_29_2__MigrateNotifyTask extends BaseJavaMigration with MigrationTasks {
val logger = org.log4s.getLogger
override def migrate(ctx: Context): Unit = {
val xa = mkTransactor(ctx)
migrateDueItemTasks.transact(xa).unsafeRunSync()
}
}

View File

@ -0,0 +1,23 @@
/*
* Copyright 2020 Eike K. & Contributors
*
* SPDX-License-Identifier: AGPL-3.0-or-later
*/
package db.migration.postgresql
import cats.effect.unsafe.implicits._
import db.migration.MigrationTasks
import doobie.implicits._
import org.flywaydb.core.api.migration.BaseJavaMigration
import org.flywaydb.core.api.migration.Context
class V1_29_2__MigrateNotifyTask extends BaseJavaMigration with MigrationTasks {
val logger = org.log4s.getLogger
override def migrate(ctx: Context): Unit = {
val xa = mkTransactor(ctx)
migrateDueItemTasks.transact(xa).unsafeRunSync()
}
}

View File

@ -11,6 +11,8 @@ import java.time.{Instant, LocalDate}
import docspell.common._
import docspell.common.syntax.all._
import docspell.jsonminiq.JsonMiniQuery
import docspell.notification.api.EventType
import docspell.query.{ItemQuery, ItemQueryParser}
import docspell.totp.Key
@ -148,6 +150,12 @@ trait DoobieMeta extends EmilDoobieMeta {
Meta[String].timap(s => ItemQueryParser.parseUnsafe(s))(q =>
q.raw.getOrElse(ItemQueryParser.unsafeAsString(q.expr))
)
implicit val metaEventType: Meta[EventType] =
Meta[String].timap(EventType.unsafeFromString)(_.name)
implicit val metaJsonMiniQuery: Meta[JsonMiniQuery] =
Meta[String].timap(JsonMiniQuery.unsafeParse)(_.unsafeAsString)
}
object DoobieMeta extends DoobieMeta {

View File

@ -22,12 +22,12 @@ object FlywayMigrate {
logger.info("Running db migrations...")
val locations = jdbc.dbmsName match {
case Some(dbtype) =>
List(s"classpath:db/migration/$dbtype")
List(s"classpath:db/migration/$dbtype", "classpath:db/migration/common")
case None =>
logger.warn(
s"Cannot read database name from jdbc url: ${jdbc.url}. Go with H2"
)
List("classpath:db/h2")
List("classpath:db/migration/h2", "classpath:db/migration/common")
}
logger.info(s"Using migration locations: $locations")

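For orientation, a rough sketch of what the resulting Flyway setup amounts to — the wiring below is illustrative only and not the project's actual migration code: the dbms-specific location is combined with the new shared "common" location before migrations run.

import org.flywaydb.core.Flyway

// Hypothetical wiring: configure Flyway with both the dbms-specific and
// the shared "common" migration locations, then apply pending migrations.
def runMigrations(url: String, user: String, pass: String, dbms: String): Unit = {
  val flyway = Flyway
    .configure()
    .dataSource(url, user, pass)
    .locations(s"classpath:db/migration/$dbms", "classpath:db/migration/common")
    .load()
  val _ = flyway.migrate()
}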
View File

@ -0,0 +1,44 @@
/*
* Copyright 2020 Eike K. & Contributors
*
* SPDX-License-Identifier: AGPL-3.0-or-later
*/
package docspell.store.queries
import cats.data.{NonEmptyList, OptionT}
import cats.effect._
import docspell.notification.api.NotificationChannel
import docspell.store.records._
import doobie.ConnectionIO
object ChannelMap {
def readMail(r: RNotificationChannelMail): ConnectionIO[Vector[NotificationChannel]] =
(for {
em <- OptionT(RUserEmail.getById(r.connection))
rec <- OptionT.fromOption[ConnectionIO](NonEmptyList.fromList(r.recipients))
ch = NotificationChannel.Email(em.toMailConfig, em.mailFrom, rec)
} yield Vector(ch)).getOrElse(Vector.empty)
def readGotify(
r: RNotificationChannelGotify
): ConnectionIO[Vector[NotificationChannel]] =
pure(NotificationChannel.Gotify(r.url, r.appKey))
def readMatrix(
r: RNotificationChannelMatrix
): ConnectionIO[Vector[NotificationChannel]] =
pure(NotificationChannel.Matrix(r.homeServer, r.roomId, r.accessToken, r.messageType))
def readHttp(
r: RNotificationChannelHttp
): ConnectionIO[Vector[NotificationChannel]] =
pure(NotificationChannel.HttpPost(r.url, Map.empty))
private def pure[A](a: A): ConnectionIO[Vector[A]] =
Sync[ConnectionIO].pure(Vector(a))
}

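Note (editorial): readMail above only yields a channel when both the referenced SMTP connection exists and the recipient list is non-empty. A standalone sketch of that OptionT pattern, with hypothetical stand-in loaders instead of RUserEmail.getById, is:

import cats.data.{NonEmptyList, OptionT}
import cats.effect.IO

// Hypothetical loaders standing in for the SMTP connection lookup and recipients.
def loadSmtp(id: String): IO[Option[String]] = IO.pure(Some(s"smtp-$id"))
val recipients: List[String] = List("user@example.com")

// Both pieces must be present, otherwise the result is an empty Vector.
val channel: IO[Vector[(String, NonEmptyList[String])]] =
  (for {
    conn <- OptionT(loadSmtp("abc"))
    rec  <- OptionT.fromOption[IO](NonEmptyList.fromList(recipients))
  } yield Vector(conn -> rec)).getOrElse(Vector.empty)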
View File

@ -0,0 +1,91 @@
/*
* Copyright 2020 Eike K. & Contributors
*
* SPDX-License-Identifier: AGPL-3.0-or-later
*/
package docspell.store.queries
import cats.Monad
import cats.data.OptionT
import cats.implicits._
import docspell.common._
import docspell.notification.api._
import docspell.store.qb.DSL._
import docspell.store.qb.Select
import docspell.store.records._
import doobie._
object QNotification {
private val hook = RNotificationHook.as("nh")
private val hevent = RNotificationHookEvent.as("ne")
private val user = RUser.as("u")
def findChannelsForEvent(event: Event): ConnectionIO[Vector[HookChannel]] =
for {
hooks <- listHooks(event.account.collective, event.eventType)
chs <- hooks.traverse(readHookChannel)
} yield chs
// --
final case class HookChannel(
hook: RNotificationHook,
channels: Vector[NotificationChannel]
)
def listHooks(
collective: Ident,
eventType: EventType
): ConnectionIO[Vector[RNotificationHook]] =
run(
select(hook.all),
from(hook).leftJoin(hevent, hevent.hookId === hook.id),
hook.enabled === true && (hook.allEvents === true || hevent.eventType === eventType) && hook.uid
.in(
Select(select(user.uid), from(user), user.cid === collective)
)
).query[RNotificationHook].to[Vector]
def readHookChannel(
hook: RNotificationHook
): ConnectionIO[HookChannel] =
for {
c1 <- read(hook.channelMail)(RNotificationChannelMail.getById)(
ChannelMap.readMail
)
c2 <- read(hook.channelGotify)(RNotificationChannelGotify.getById)(
ChannelMap.readGotify
)
c3 <- read(hook.channelMatrix)(RNotificationChannelMatrix.getById)(
ChannelMap.readMatrix
)
c4 <- read(hook.channelHttp)(RNotificationChannelHttp.getById)(ChannelMap.readHttp)
} yield HookChannel(hook, c1 ++ c2 ++ c3 ++ c4)
def readChannel(ch: RNotificationChannel): ConnectionIO[Vector[NotificationChannel]] =
ch.fold(
ChannelMap.readMail,
ChannelMap.readGotify,
ChannelMap.readMatrix,
ChannelMap.readHttp
)
private def read[A, B](channel: Option[Ident])(
load: Ident => ConnectionIO[Option[A]]
)(
m: A => ConnectionIO[Vector[B]]
): ConnectionIO[Vector[B]] =
channel match {
case Some(ch) =>
(for {
a <- OptionT(load(ch))
ch <- OptionT.liftF(m(a))
} yield ch).getOrElse(Vector.empty)
case None =>
Monad[ConnectionIO].pure(Vector.empty)
}
}

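A hedged sketch of how the query above is typically used from the effectful side; the transactor and event value are assumed to exist and this wiring is not part of this file:

import cats.effect.IO
import doobie.Transactor
import doobie.implicits._
import docspell.notification.api.Event
import docspell.store.queries.QNotification

// Given a transactor and an incoming event, resolve all enabled hooks
// for the event's collective together with their concrete channels.
def channelsFor(xa: Transactor[IO], event: Event): IO[Vector[QNotification.HookChannel]] =
  QNotification.findChannelsForEvent(event).transact(xa)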
View File

@ -410,6 +410,14 @@ object RItem {
def findByIdAndCollective(itemId: Ident, coll: Ident): ConnectionIO[Option[RItem]] =
run(select(T.all), from(T), T.id === itemId && T.cid === coll).query[RItem].option
def findAllByIdAndCollective(
itemIds: NonEmptyList[Ident],
coll: Ident
): ConnectionIO[Vector[RItem]] =
run(select(T.all), from(T), T.id.in(itemIds) && T.cid === coll)
.query[RItem]
.to[Vector]
def findById(itemId: Ident): ConnectionIO[Option[RItem]] =
run(select(T.all), from(T), T.id === itemId).query[RItem].option

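Editorial note: because the new lookup takes a NonEmptyList, callers must prove non-emptiness up front. A small hypothetical caller (not part of this commit) could look like:

import cats.Monad
import cats.data.NonEmptyList
import docspell.common.Ident
import docspell.store.records.RItem
import doobie.ConnectionIO

// Hypothetical helper: accept a possibly-empty list of item ids and return
// an empty result instead of issuing a query when no ids were given.
def findItems(ids: List[Ident], coll: Ident): ConnectionIO[Vector[RItem]] =
  NonEmptyList.fromList(ids) match {
    case Some(nel) => RItem.findAllByIdAndCollective(nel, coll)
    case None      => Monad[ConnectionIO].pure(Vector.empty)
  }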
View File

@ -0,0 +1,148 @@
/*
* Copyright 2020 Eike K. & Contributors
*
* SPDX-License-Identifier: AGPL-3.0-or-later
*/
package docspell.store.records
import cats.data.OptionT
import docspell.common._
import docspell.notification.api.ChannelRef
import docspell.notification.api.ChannelType
import doobie._
sealed trait RNotificationChannel {
def id: Ident
def fold[A](
f1: RNotificationChannelMail => A,
f2: RNotificationChannelGotify => A,
f3: RNotificationChannelMatrix => A,
f4: RNotificationChannelHttp => A
): A
}
object RNotificationChannel {
final case class Email(r: RNotificationChannelMail) extends RNotificationChannel {
override def fold[A](
f1: RNotificationChannelMail => A,
f2: RNotificationChannelGotify => A,
f3: RNotificationChannelMatrix => A,
f4: RNotificationChannelHttp => A
): A = f1(r)
val id = r.id
}
final case class Gotify(r: RNotificationChannelGotify) extends RNotificationChannel {
override def fold[A](
f1: RNotificationChannelMail => A,
f2: RNotificationChannelGotify => A,
f3: RNotificationChannelMatrix => A,
f4: RNotificationChannelHttp => A
): A = f2(r)
val id = r.id
}
final case class Matrix(r: RNotificationChannelMatrix) extends RNotificationChannel {
override def fold[A](
f1: RNotificationChannelMail => A,
f2: RNotificationChannelGotify => A,
f3: RNotificationChannelMatrix => A,
f4: RNotificationChannelHttp => A
): A = f3(r)
val id = r.id
}
final case class Http(r: RNotificationChannelHttp) extends RNotificationChannel {
override def fold[A](
f1: RNotificationChannelMail => A,
f2: RNotificationChannelGotify => A,
f3: RNotificationChannelMatrix => A,
f4: RNotificationChannelHttp => A
): A = f4(r)
val id = r.id
}
def insert(r: RNotificationChannel): ConnectionIO[Int] =
r.fold(
RNotificationChannelMail.insert,
RNotificationChannelGotify.insert,
RNotificationChannelMatrix.insert,
RNotificationChannelHttp.insert
)
def update(r: RNotificationChannel): ConnectionIO[Int] =
r.fold(
RNotificationChannelMail.update,
RNotificationChannelGotify.update,
RNotificationChannelMatrix.update,
RNotificationChannelHttp.update
)
def getByAccount(account: AccountId): ConnectionIO[Vector[RNotificationChannel]] =
for {
mail <- RNotificationChannelMail.getByAccount(account)
gotify <- RNotificationChannelGotify.getByAccount(account)
matrix <- RNotificationChannelMatrix.getByAccount(account)
http <- RNotificationChannelHttp.getByAccount(account)
} yield mail.map(Email.apply) ++ gotify.map(Gotify.apply) ++ matrix.map(
Matrix.apply
) ++ http.map(Http.apply)
def getById(id: Ident): ConnectionIO[Vector[RNotificationChannel]] =
for {
mail <- RNotificationChannelMail.getById(id)
gotify <- RNotificationChannelGotify.getById(id)
matrix <- RNotificationChannelMatrix.getById(id)
http <- RNotificationChannelHttp.getById(id)
} yield mail.map(Email.apply).toVector ++
gotify.map(Gotify.apply).toVector ++
matrix.map(Matrix.apply).toVector ++
http.map(Http.apply).toVector
def getByRef(ref: ChannelRef): ConnectionIO[Option[RNotificationChannel]] =
ref.channelType match {
case ChannelType.Mail =>
RNotificationChannelMail.getById(ref.id).map(_.map(Email.apply))
case ChannelType.Matrix =>
RNotificationChannelMatrix.getById(ref.id).map(_.map(Matrix.apply))
case ChannelType.Gotify =>
RNotificationChannelGotify.getById(ref.id).map(_.map(Gotify.apply))
case ChannelType.Http =>
RNotificationChannelHttp.getById(ref.id).map(_.map(Http.apply))
}
def getByHook(r: RNotificationHook): ConnectionIO[Vector[RNotificationChannel]] = {
def opt(id: Option[Ident]): OptionT[ConnectionIO, Ident] =
OptionT.fromOption(id)
for {
mail <- opt(r.channelMail).flatMapF(RNotificationChannelMail.getById).value
gotify <- opt(r.channelGotify).flatMapF(RNotificationChannelGotify.getById).value
matrix <- opt(r.channelMatrix).flatMapF(RNotificationChannelMatrix.getById).value
http <- opt(r.channelHttp).flatMapF(RNotificationChannelHttp.getById).value
} yield mail.map(Email.apply).toVector ++
gotify.map(Gotify.apply).toVector ++
matrix.map(Matrix.apply).toVector ++
http.map(Http.apply).toVector
}
def deleteByAccount(id: Ident, account: AccountId): ConnectionIO[Int] =
for {
n1 <- RNotificationChannelMail.deleteByAccount(id, account)
n2 <- RNotificationChannelGotify.deleteByAccount(id, account)
n3 <- RNotificationChannelMatrix.deleteByAccount(id, account)
n4 <- RNotificationChannelHttp.deleteByAccount(id, account)
} yield n1 + n2 + n3 + n4
}

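As an aside, the fold defined above makes exhaustive dispatch on the concrete channel kind straightforward. A hypothetical helper (not part of this commit) deriving the ChannelType of a stored channel could be written as:

import docspell.notification.api.ChannelType
import docspell.store.records.RNotificationChannel

// Hypothetical: map a stored channel back to its ChannelType via fold.
def channelType(ch: RNotificationChannel): ChannelType =
  ch.fold(
    _ => ChannelType.Mail,
    _ => ChannelType.Gotify,
    _ => ChannelType.Matrix,
    _ => ChannelType.Http
  )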
View File

@ -0,0 +1,86 @@
/*
* Copyright 2020 Eike K. & Contributors
*
* SPDX-License-Identifier: AGPL-3.0-or-later
*/
package docspell.store.records
import cats.data.NonEmptyList
import docspell.common._
import docspell.store.qb.DSL._
import docspell.store.qb._
import doobie._
import doobie.implicits._
final case class RNotificationChannelGotify(
id: Ident,
uid: Ident,
url: LenientUri,
appKey: Password,
created: Timestamp
) {
def vary: RNotificationChannel =
RNotificationChannel.Gotify(this)
}
object RNotificationChannelGotify {
final case class Table(alias: Option[String]) extends TableDef {
val tableName = "notification_channel_gotify"
val id = Column[Ident]("id", this)
val uid = Column[Ident]("uid", this)
val url = Column[LenientUri]("url", this)
val appKey = Column[Password]("app_key", this)
val created = Column[Timestamp]("created", this)
val all: NonEmptyList[Column[_]] =
NonEmptyList.of(id, uid, url, appKey, created)
}
val T: Table = Table(None)
def as(alias: String): Table =
Table(Some(alias))
def getById(id: Ident): ConnectionIO[Option[RNotificationChannelGotify]] =
run(select(T.all), from(T), T.id === id).query[RNotificationChannelGotify].option
def insert(r: RNotificationChannelGotify): ConnectionIO[Int] =
DML.insert(T, T.all, sql"${r.id},${r.uid},${r.url},${r.appKey},${r.created}")
def update(r: RNotificationChannelGotify): ConnectionIO[Int] =
DML.update(
T,
T.id === r.id && T.uid === r.uid,
DML.set(
T.url.setTo(r.url),
T.appKey.setTo(r.appKey)
)
)
def getByAccount(
account: AccountId
): ConnectionIO[Vector[RNotificationChannelGotify]] = {
val user = RUser.as("u")
val gotify = as("c")
Select(
select(gotify.all),
from(gotify).innerJoin(user, user.uid === gotify.uid),
user.cid === account.collective && user.login === account.user
).build.query[RNotificationChannelGotify].to[Vector]
}
def deleteById(id: Ident): ConnectionIO[Int] =
DML.delete(T, T.id === id)
def deleteByAccount(id: Ident, account: AccountId): ConnectionIO[Int] = {
val u = RUser.as("u")
DML.delete(
T,
T.id === id && T.uid.in(Select(select(u.uid), from(u), u.isAccount(account)))
)
}
}

View File

@ -0,0 +1,75 @@
/*
* Copyright 2020 Eike K. & Contributors
*
* SPDX-License-Identifier: AGPL-3.0-or-later
*/
package docspell.store.records
import cats.data.NonEmptyList
import docspell.common._
import docspell.store.qb.DSL._
import docspell.store.qb._
import doobie._
import doobie.implicits._
final case class RNotificationChannelHttp(
id: Ident,
uid: Ident,
url: LenientUri,
created: Timestamp
) {
def vary: RNotificationChannel =
RNotificationChannel.Http(this)
}
object RNotificationChannelHttp {
final case class Table(alias: Option[String]) extends TableDef {
val tableName = "notification_channel_http"
val id = Column[Ident]("id", this)
val uid = Column[Ident]("uid", this)
val url = Column[LenientUri]("url", this)
val created = Column[Timestamp]("created", this)
val all: NonEmptyList[Column[_]] =
NonEmptyList.of(id, uid, url, created)
}
val T: Table = Table(None)
def as(alias: String): Table =
Table(Some(alias))
def getById(id: Ident): ConnectionIO[Option[RNotificationChannelHttp]] =
run(select(T.all), from(T), T.id === id).query[RNotificationChannelHttp].option
def insert(r: RNotificationChannelHttp): ConnectionIO[Int] =
DML.insert(T, T.all, sql"${r.id},${r.uid},${r.url},${r.created}")
def update(r: RNotificationChannelHttp): ConnectionIO[Int] =
DML.update(T, T.id === r.id && T.uid === r.uid, DML.set(T.url.setTo(r.url)))
def getByAccount(account: AccountId): ConnectionIO[Vector[RNotificationChannelHttp]] = {
val user = RUser.as("u")
val http = as("c")
Select(
select(http.all),
from(http).innerJoin(user, user.uid === http.uid),
user.cid === account.collective && user.login === account.user
).build.query[RNotificationChannelHttp].to[Vector]
}
def deleteById(id: Ident): ConnectionIO[Int] =
DML.delete(T, T.id === id)
def deleteByAccount(id: Ident, account: AccountId): ConnectionIO[Int] = {
val u = RUser.as("u")
DML.delete(
T,
T.id === id && T.uid.in(Select(select(u.uid), from(u), u.isAccount(account)))
)
}
}

View File

@ -0,0 +1,88 @@
/*
* Copyright 2020 Eike K. & Contributors
*
* SPDX-License-Identifier: AGPL-3.0-or-later
*/
package docspell.store.records
import cats.data.NonEmptyList
import docspell.common._
import docspell.store.qb.DSL._
import docspell.store.qb._
import doobie._
import doobie.implicits._
import emil.MailAddress
final case class RNotificationChannelMail(
id: Ident,
uid: Ident,
connection: Ident,
recipients: List[MailAddress],
created: Timestamp
) {
def vary: RNotificationChannel =
RNotificationChannel.Email(this)
}
object RNotificationChannelMail {
final case class Table(alias: Option[String]) extends TableDef {
val tableName = "notification_channel_mail"
val id = Column[Ident]("id", this)
val uid = Column[Ident]("uid", this)
val connection = Column[Ident]("conn_id", this)
val recipients = Column[List[MailAddress]]("recipients", this)
val created = Column[Timestamp]("created", this)
val all: NonEmptyList[Column[_]] =
NonEmptyList.of(id, uid, connection, recipients, created)
}
val T: Table = Table(None)
def as(alias: String): Table = Table(Some(alias))
def insert(r: RNotificationChannelMail): ConnectionIO[Int] =
DML.insert(
T,
T.all,
sql"${r.id},${r.uid},${r.connection},${r.recipients},${r.created}"
)
def update(r: RNotificationChannelMail): ConnectionIO[Int] =
DML.update(
T,
T.id === r.id && T.uid === r.uid,
DML.set(
T.connection.setTo(r.connection),
T.recipients.setTo(r.recipients.toList)
)
)
def getById(id: Ident): ConnectionIO[Option[RNotificationChannelMail]] =
run(select(T.all), from(T), T.id === id).query[RNotificationChannelMail].option
def getByAccount(account: AccountId): ConnectionIO[Vector[RNotificationChannelMail]] = {
val user = RUser.as("u")
val gotify = as("c")
Select(
select(gotify.all),
from(gotify).innerJoin(user, user.uid === gotify.uid),
user.cid === account.collective && user.login === account.user
).build.query[RNotificationChannelMail].to[Vector]
}
def deleteById(id: Ident): ConnectionIO[Int] =
DML.delete(T, T.id === id)
def deleteByAccount(id: Ident, account: AccountId): ConnectionIO[Int] = {
val u = RUser.as("u")
DML.delete(
T,
T.id === id && T.uid.in(Select(select(u.uid), from(u), u.isAccount(account)))
)
}
}

Some files were not shown because too many files have changed in this diff.