Experiment with addons
Addons allow executing external programs in certain contexts inside docspell. Currently, they can be run after files have been processed. Addons are provided as URLs pointing to zip files.
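For illustration, here is a minimal, self-contained sketch of the URL allow/deny gate that the new `Config.Addons` (added in the `Config` hunk below) applies before an addon zip is accepted. The names are simplified stand-ins (the real `UrlMatcher` is replaced by a plain predicate), not docspell's actual API:

// Hypothetical sketch only: the real Config.Addons uses docspell's
// UrlMatcher type; a plain String => Boolean predicate stands in here.
final case class AddonsConfig(
    enabled: Boolean,
    allowImpure: Boolean,
    allowedUrls: String => Boolean,
    deniedUrls: String => Boolean
) {
  // a url is accepted only if the allow list matches and the deny list does not
  def isAllowed(url: String): Boolean =
    allowedUrls(url) && !deniedUrls(url)

  def isDenied(url: String): Boolean =
    !isAllowed(url)
}

object AddonsConfigCheck extends App {
  val cfg = AddonsConfig(
    enabled = true,
    allowImpure = false,
    allowedUrls = _.startsWith("https://github.com/"),
    deniedUrls = _ => false
  )
  println(cfg.isAllowed("https://github.com/user/addon/archive/main.zip")) // true
  println(cfg.isAllowed("https://example.com/addon.zip")) // false
}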
@@ -20,6 +20,7 @@ trait AttachedEvent[R] {

object AttachedEvent {

  /** Only the result, no events. */
  def only[R](v: R): AttachedEvent[R] =
    new AttachedEvent[R] {
      val value = v

@@ -8,11 +8,14 @@ package docspell.backend

import cats.effect._

import docspell.backend.BackendCommands.EventContext
import docspell.backend.auth.Login
import docspell.backend.fulltext.CreateIndex
import docspell.backend.ops._
import docspell.backend.signup.OSignup
import docspell.common.bc.BackendCommandRunner
import docspell.ftsclient.FtsClient
import docspell.joexapi.client.JoexClient
import docspell.notification.api.{EventExchange, NotificationModule}
import docspell.pubsub.api.PubSubT
import docspell.scheduler.JobStoreModule
@@ -20,6 +23,7 @@ import docspell.store.Store
import docspell.totp.Totp

import emil.Emil
import org.http4s.client.Client

trait BackendApp[F[_]] {

@@ -35,6 +39,7 @@ trait BackendApp[F[_]] {
  def job: OJob[F]
  def item: OItem[F]
  def itemSearch: OItemSearch[F]
  def attachment: OAttachment[F]
  def fulltext: OFulltext[F]
  def mail: OMail[F]
  def joex: OJoex[F]
@@ -52,23 +57,30 @@ trait BackendApp[F[_]] {
  def fileRepository: OFileRepository[F]
  def itemLink: OItemLink[F]
  def downloadAll: ODownloadAll[F]
  def addons: OAddons[F]

  def commands(eventContext: Option[EventContext]): BackendCommandRunner[F, Unit]
}

object BackendApp {

  def create[F[_]: Async](
      cfg: Config,
      store: Store[F],
      javaEmil: Emil[F],
      httpClient: Client[F],
      ftsClient: FtsClient[F],
      pubSubT: PubSubT[F],
      schedulerModule: JobStoreModule[F],
      notificationMod: NotificationModule[F]
  ): Resource[F, BackendApp[F]] =
    for {
      nodeImpl <- ONode(store)
      totpImpl <- OTotp(store, Totp.default)
      loginImpl <- Login[F](store, Totp.default)
      signupImpl <- OSignup[F](store)
      joexImpl <- OJoex(pubSubT)
      joexClient = JoexClient(httpClient)
      joexImpl <- OJoex(pubSubT, nodeImpl, joexClient)
      collImpl <- OCollective[F](
        store,
        schedulerModule.userTasks,
@@ -80,7 +92,6 @@ object BackendApp {
      equipImpl <- OEquipment[F](store)
      orgImpl <- OOrganization(store)
      uploadImpl <- OUpload(store, schedulerModule.jobs)
      nodeImpl <- ONode(store)
      jobImpl <- OJob(store, joexImpl, pubSubT)
      createIndex <- CreateIndex.resource(ftsClient, store)
      itemImpl <- OItem(store, ftsClient, createIndex, schedulerModule.jobs)
@@ -109,6 +120,16 @@ object BackendApp {
      fileRepoImpl <- OFileRepository(store, schedulerModule.jobs)
      itemLinkImpl <- Resource.pure(OItemLink(store, itemSearchImpl))
      downloadAllImpl <- Resource.pure(ODownloadAll(store, jobImpl, schedulerModule.jobs))
      attachImpl <- Resource.pure(OAttachment(store, ftsClient, schedulerModule.jobs))
      addonsImpl <- Resource.pure(
        OAddons(
          cfg.addons,
          store,
          schedulerModule.userTasks,
          schedulerModule.jobs,
          joexImpl
        )
      )
    } yield new BackendApp[F] {
      val pubSub = pubSubT
      val login = loginImpl
@@ -139,5 +160,10 @@ object BackendApp {
      val fileRepository = fileRepoImpl
      val itemLink = itemLinkImpl
      val downloadAll = downloadAllImpl
      val addons = addonsImpl
      val attachment = attachImpl

      def commands(eventContext: Option[EventContext]) =
        BackendCommands.fromBackend(this, eventContext)
    }
}

@@ -0,0 +1,175 @@
/*
 * Copyright 2020 Eike K. & Contributors
 *
 * SPDX-License-Identifier: AGPL-3.0-or-later
 */

package docspell.backend

import cats.data.{NonEmptyList => Nel}
import cats.effect.Sync
import cats.syntax.all._

import docspell.backend.BackendCommands.EventContext
import docspell.backend.ops.OCustomFields.SetValue
import docspell.backend.ops._
import docspell.common.bc._
import docspell.common.{AccountId, Ident, LenientUri}

private[backend] class BackendCommands[F[_]: Sync](
    itemOps: OItem[F],
    attachOps: OAttachment[F],
    fieldOps: OCustomFields[F],
    notificationOps: ONotification[F],
    eventContext: Option[EventContext]
) extends BackendCommandRunner[F, Unit] {
  private[this] val logger = docspell.logging.getLogger[F]

  def run(collective: Ident, cmd: BackendCommand): F[Unit] =
    doRun(collective, cmd).attempt.flatMap {
      case Right(_) => ().pure[F]
      case Left(ex) =>
        logger.error(ex)(s"Backend command $cmd failed for collective ${collective.id}.")
    }

  def doRun(collective: Ident, cmd: BackendCommand): F[Unit] =
    cmd match {
      case BackendCommand.ItemUpdate(item, actions) =>
        actions.traverse_(a => runItemAction(collective, item, a))

      case BackendCommand.AttachmentUpdate(item, attach, actions) =>
        actions.traverse_(a => runAttachAction(collective, item, attach, a))
    }

  def runAll(collective: Ident, cmds: List[BackendCommand]): F[Unit] =
    cmds.traverse_(run(collective, _))

  def runItemAction(collective: Ident, item: Ident, action: ItemAction): F[Unit] =
    action match {
      case ItemAction.AddTags(tags) =>
        logger.debug(s"Setting tags $tags on ${item.id} for ${collective.id}") *>
          itemOps
            .linkTags(item, tags.toList, collective)
            .flatMap(sendEvents)

      case ItemAction.RemoveTags(tags) =>
        logger.debug(s"Remove tags $tags on ${item.id} for ${collective.id}") *>
          itemOps
            .removeTagsMultipleItems(Nel.of(item), tags.toList, collective)
            .flatMap(sendEvents)

      case ItemAction.ReplaceTags(tags) =>
        logger.debug(s"Replace tags $tags on ${item.id} for ${collective.id}") *>
          itemOps
            .setTags(item, tags.toList, collective)
            .flatMap(sendEvents)

      case ItemAction.SetFolder(folder) =>
        logger.debug(s"Set folder $folder on ${item.id} for ${collective.id}") *>
          itemOps
            .setFolder(item, folder, collective)
            .void

      case ItemAction.RemoveTagsCategory(cats) =>
        logger.debug(
          s"Remove tags in categories $cats on ${item.id} for ${collective.id}"
        ) *>
          itemOps
            .removeTagsOfCategories(item, collective, cats)
            .flatMap(sendEvents)

      case ItemAction.SetCorrOrg(id) =>
        logger.debug(
          s"Set correspondent organization ${id.map(_.id)} for ${collective.id}"
        ) *>
          itemOps.setCorrOrg(Nel.of(item), id, collective).void

      case ItemAction.SetCorrPerson(id) =>
        logger.debug(
          s"Set correspondent person ${id.map(_.id)} for ${collective.id}"
        ) *>
          itemOps.setCorrPerson(Nel.of(item), id, collective).void

      case ItemAction.SetConcPerson(id) =>
        logger.debug(
          s"Set concerning person ${id.map(_.id)} for ${collective.id}"
        ) *>
          itemOps.setConcPerson(Nel.of(item), id, collective).void

      case ItemAction.SetConcEquipment(id) =>
        logger.debug(
          s"Set concerning equipment ${id.map(_.id)} for ${collective.id}"
        ) *>
          itemOps.setConcEquip(Nel.of(item), id, collective).void

      case ItemAction.SetField(field, value) =>
        logger.debug(
          s"Set field on item ${item.id} ${field.id} to '$value' for ${collective.id}"
        ) *>
          fieldOps
            .setValue(item, SetValue(field, value, collective))
            .flatMap(sendEvents)

      case ItemAction.SetNotes(notes) =>
        logger.debug(s"Set notes on item ${item.id} for ${collective.id}") *>
          itemOps.setNotes(item, notes, collective).void

      case ItemAction.AddNotes(notes, sep) =>
        logger.debug(s"Add notes on item ${item.id} for ${collective.id}") *>
          itemOps.addNotes(item, notes, sep, collective).void

      case ItemAction.SetName(name) =>
        logger.debug(s"Set name '$name' on item ${item.id} for ${collective.id}") *>
          itemOps.setName(item, name, collective).void
    }

  def runAttachAction(
      collective: Ident,
      itemId: Ident,
      attachId: Ident,
      action: AttachmentAction
  ): F[Unit] =
    action match {
      case AttachmentAction.SetExtractedText(text) =>
        attachOps.setExtractedText(
          collective,
          itemId,
          attachId,
          text.getOrElse("").pure[F]
        )
    }

  private def sendEvents(result: AttachedEvent[_]): F[Unit] =
    eventContext match {
      case Some(ctx) =>
        notificationOps.offerEvents(result.event(ctx.account, ctx.baseUrl))
      case None => ().pure[F]
    }
}

object BackendCommands {

  /** If supplied, notification events will be sent. */
  case class EventContext(account: AccountId, baseUrl: Option[LenientUri])

  def fromBackend[F[_]: Sync](
      backendApp: BackendApp[F],
      eventContext: Option[EventContext] = None
  ): BackendCommandRunner[F, Unit] =
    new BackendCommands[F](
      backendApp.item,
      backendApp.attachment,
      backendApp.customFields,
      backendApp.notification,
      eventContext
    )

  def apply[F[_]: Sync](
      item: OItem[F],
      attachment: OAttachment[F],
      fields: OCustomFields[F],
      notification: ONotification[F],
      eventContext: Option[EventContext] = None
  ): BackendCommandRunner[F, Unit] =
    new BackendCommands[F](item, attachment, fields, notification, eventContext)
}

@@ -20,7 +20,8 @@ case class Config(
    mailDebug: Boolean,
    jdbc: JdbcConfig,
    signup: SignupConfig,
    files: Config.Files
    files: Config.Files,
    addons: Config.Addons
) {

  def mailSettings: Settings =
@@ -66,4 +67,21 @@ object Config {
      (storesEmpty |+| defaultStorePresent).map(_ => this)
    }
  }

  case class Addons(
      enabled: Boolean,
      allowImpure: Boolean,
      allowedUrls: UrlMatcher,
      deniedUrls: UrlMatcher
  ) {
    def isAllowed(url: LenientUri): Boolean =
      allowedUrls.matches(url) && !deniedUrls.matches(url)

    def isDenied(url: LenientUri): Boolean =
      !isAllowed(url)
  }
  object Addons {
    val disabled: Addons =
      Addons(false, false, UrlMatcher.False, UrlMatcher.True)
  }
}

@@ -16,6 +16,26 @@ import docspell.notification.api.PeriodicQueryArgs
import docspell.scheduler.Job

object JobFactory extends MailAddressCodec {
  def existingItemAddon[F[_]: Sync](
      args: ItemAddonTaskArgs,
      submitter: AccountId
  ): F[Job[ItemAddonTaskArgs]] =
    Job.createNew(
      ItemAddonTaskArgs.taskName,
      submitter.collective,
      args,
      "Run addons on item",
      submitter.user,
      Priority.High,
      args.addonRunConfigs
        .map(_.take(23))
        .toList
        .sorted
        .foldLeft(args.itemId)(_ / _)
        .take(250)
        .some
    )

  def downloadZip[F[_]: Sync](
      args: DownloadZipArgs,
      summaryId: Ident,

@@ -45,7 +45,14 @@ object CreateIndex {
      chunkSize: Int
  ): F[Unit] = {
    val attachs = store
      .transact(QAttachment.allAttachmentMetaAndName(collective, itemIds, chunkSize))
      .transact(
        QAttachment.allAttachmentMetaAndName(
          collective,
          itemIds,
          ItemState.validStates,
          chunkSize
        )
      )
      .map(caa =>
        TextData
          .attachment(

@@ -0,0 +1,17 @@
/*
 * Copyright 2020 Eike K. & Contributors
 *
 * SPDX-License-Identifier: AGPL-3.0-or-later
 */

package docspell.backend.joex

import fs2.io.file.Path

import docspell.addons.AddonExecutorConfig

final case class AddonEnvConfig(
    workingDir: Path,
    cacheDir: Path,
    executorConfig: AddonExecutorConfig
)

@@ -0,0 +1,199 @@
/*
 * Copyright 2020 Eike K. & Contributors
 *
 * SPDX-License-Identifier: AGPL-3.0-or-later
 */

package docspell.backend.joex

import cats.data.OptionT
import cats.effect._
import cats.syntax.all._

import docspell.addons._
import docspell.backend.joex.AddonOps.{AddonRunConfigRef, ExecResult}
import docspell.backend.ops.OAttachment
import docspell.common._
import docspell.common.bc.BackendCommandRunner
import docspell.common.exec.Env
import docspell.logging.Logger
import docspell.scheduler.JobStore
import docspell.store.Store
import docspell.store.file.FileUrlReader
import docspell.store.records.AddonRunConfigResolved

trait AddonOps[F[_]] {

  def execAll(
      collective: Ident,
      trigger: Set[AddonTriggerType],
      runConfigIds: Set[Ident],
      logger: Option[Logger[F]]
  )(
      middleware: Middleware[F]
  ): F[ExecResult]

  def execById(collective: Ident, runConfigId: Ident, logger: Logger[F])(
      middleware: Middleware[F]
  ): F[ExecResult]

  /** Find enabled addon run config references to be executed. Can be additionally
    * filtered by given ids and triggers.
    */
  def findAddonRefs(
      collective: Ident,
      trigger: Set[AddonTriggerType],
      runConfigIds: Set[Ident]
  ): F[List[AddonRunConfigRef]]

  /** Find enabled addon run config reference given an addon task id */
  def findAddonRef(collective: Ident, runConfigId: Ident): F[Option[AddonRunConfigRef]]

  /** Creates an executor for addons given a configuration. */
  def getExecutor(cfg: AddonExecutorConfig): F[AddonExecutor[F]]

}

object AddonOps {
  case class AddonRunConfigRef(
      id: Ident,
      collective: Ident,
      userId: Option[Ident],
      name: String,
      refs: List[AddonRef]
  )

  object AddonRunConfigRef {
    def fromResolved(r: AddonRunConfigResolved): AddonRunConfigRef =
      AddonRunConfigRef(
        r.config.id,
        r.config.cid,
        r.config.userId,
        r.config.name,
        r.refs.map(ref => AddonRef(ref.archive.asArchive, ref.ref.args))
      )
  }

  case class ExecResult(
      result: List[AddonExecutionResult],
      runConfigs: List[AddonRunConfigRef]
  ) {
    lazy val combined = result.combineAll
  }

  object ExecResult {
    def runConfigNotFound(id: Ident): ExecResult =
      ExecResult(
        AddonExecutionResult(
          AddonResult.executionFailed(
            new Exception(s"Addon run config ${id.id} not found.")
          ) :: Nil,
          false
        ) :: Nil,
        Nil
      )
  }

  def apply[F[_]: Async](
      cfg: AddonEnvConfig,
      store: Store[F],
      cmdRunner: BackendCommandRunner[F, Unit],
      attachment: OAttachment[F],
      jobStore: JobStore[F]
  ): AddonOps[F] =
    new AddonOps[F] with LoggerExtension {
      private[this] val logger = docspell.logging.getLogger[F]

      private val urlReader = FileUrlReader(store.fileRepo)
      private val postProcess = AddonPostProcess(cmdRunner, store, attachment, jobStore)
      private val prepare = new AddonPrepare[F](store)

      def execAll(
          collective: Ident,
          trigger: Set[AddonTriggerType],
          runConfigIds: Set[Ident],
          logger: Option[Logger[F]]
      )(
          custom: Middleware[F]
      ): F[ExecResult] =
        for {
          runCfgs <- findAddonRefs(collective, trigger, runConfigIds)
          log = logger.getOrElse(this.logger)
          _ <- log.info(s"Running ${runCfgs.size} addon tasks for trigger $trigger")

          results <- runCfgs.traverse(r => execRunConfig(log, r, custom))
        } yield ExecResult(results.flatMap(_.result), runCfgs)

      def execById(collective: Ident, runConfigId: Ident, logger: Logger[F])(
          custom: Middleware[F]
      ): F[ExecResult] =
        (for {
          runCfg <- OptionT(findAddonRef(collective, runConfigId))
          execRes <- OptionT.liftF(execRunConfig(logger, runCfg, custom))
        } yield execRes).getOrElse(ExecResult.runConfigNotFound(runConfigId))

      def execRunConfig(
          logger: Logger[F],
          runCfg: AddonRunConfigRef,
          custom: Middleware[F]
      ): F[ExecResult] =
        for {
          executor <- getExecutor(cfg.executorConfig)
          log = logger.withRunConfig(runCfg)
          result <-
            Directory.temp(cfg.workingDir, "addon-output-").use { outDir =>
              val cacheDir = cfg.cacheDir / runCfg.id.id
              val inputEnv =
                InputEnv(runCfg.refs, cfg.workingDir, outDir, cacheDir, Env.empty)

              for {
                middleware <- createMiddleware(custom, runCfg)
                res <- middleware(executor.execute(log)).run(inputEnv)
                _ <- log.debug(s"Addon result: $res")
                _ <- postProcess.onResult(log, runCfg.collective, res, outDir)
              } yield res
            }
          execRes = ExecResult(List(result), List(runCfg))
        } yield execRes

      def createMiddleware(custom: Middleware[F], runCfg: AddonRunConfigRef) = for {
        dscMW <- prepare.createDscEnv(runCfg, cfg.executorConfig.runTimeout)
        mm = dscMW >> custom >> prepare.logResult(logger, runCfg) >> Middleware
          .ephemeralRun[F]
      } yield mm

      def getExecutor(cfg: AddonExecutorConfig): F[AddonExecutor[F]] =
        Async[F].pure(AddonExecutor(cfg, urlReader))

      def findAddonRefs(
          collective: Ident,
          trigger: Set[AddonTriggerType],
          runConfigIds: Set[Ident]
      ): F[List[AddonRunConfigRef]] =
        store
          .transact(
            AddonRunConfigResolved.findAllForCollective(
              collective,
              enabled = true.some,
              trigger,
              runConfigIds
            )
          )
          .map(_.map(AddonRunConfigRef.fromResolved))

      def findAddonRef(
          collective: Ident,
          runConfigId: Ident
      ): F[Option[AddonRunConfigRef]] =
        OptionT(
          store
            .transact(
              AddonRunConfigResolved.findById(
                runConfigId,
                collective,
                enabled = Some(true)
              )
            )
        ).map(AddonRunConfigRef.fromResolved).value
    }
}

@@ -0,0 +1,198 @@
/*
 * Copyright 2020 Eike K. & Contributors
 *
 * SPDX-License-Identifier: AGPL-3.0-or-later
 */

package docspell.backend.joex

import cats.data.OptionT
import cats.effect.kernel.Sync
import cats.syntax.all._
import fs2.io.file.{Files, Path}

import docspell.addons._
import docspell.addons.out.{AddonOutput, ItemFile, NewItem}
import docspell.backend.JobFactory
import docspell.backend.ops.OAttachment
import docspell.common._
import docspell.common.bc.BackendCommandRunner
import docspell.files.FileSupport
import docspell.logging.Logger
import docspell.scheduler.JobStore
import docspell.store.Store
import docspell.store.records._

final private[joex] class AddonPostProcess[F[_]: Sync: Files](
    cmdRunner: BackendCommandRunner[F, Unit],
    store: Store[F],
    attachOps: OAttachment[F],
    jobStore: JobStore[F]
) extends FileSupport {

  def onResult(
      logger: Logger[F],
      collective: Ident,
      result: AddonExecutionResult,
      outputDir: Path
  ): F[Unit] =
    result.addonResult match {
      case AddonResult.Success(output) =>
        onSuccess(logger, collective, output, outputDir)
      case _ =>
        ().pure[F]
    }

  def onSuccess(
      logger: Logger[F],
      collective: Ident,
      output: AddonOutput,
      outputDir: Path
  ): F[Unit] =
    for {
      _ <- logger.info("Applying addon output")
      _ <- cmdRunner.runAll(collective, output.commands)
      _ <- logger.debug("Applying changes from files")
      _ <- output.files.traverse_(updateOne(logger, collective, outputDir))
      _ <- output.newItems.traverse_(submitNewItem(logger, collective, outputDir))
    } yield ()

  def submitNewItem(
      logger: Logger[F],
      collective: Ident,
      outputDir: Path
  )(newItem: NewItem): F[Unit] =
    for {
      _ <- logger.info(s"Submit new item with ${newItem.files.size} files")
      files <- newItem.resolveFiles[F](logger, outputDir)
      collLang <- store.transact(RCollective.findLanguage(collective))
      uploaded <- files.traverse(file =>
        file.readAll
          .through(
            store.fileRepo.save(
              collective,
              FileCategory.AttachmentSource,
              MimeTypeHint.filename(file)
            )
          )
          .compile
          .lastOrError
          .map(key => file.fileName.toString -> key)
      )
      _ <- logger.debug(s"Saved ${uploaded.size} files to be processed.")
      args = ProcessItemArgs(
        newItem.toProcessMeta(collective, collLang, "addon"),
        uploaded.map(f => ProcessItemArgs.File(f._1.some, f._2))
      )
      account = AccountId(collective, DocspellSystem.user)
      job <- JobFactory.processItem(args, account, Priority.High, None)
      _ <- jobStore.insert(job.encode)
      _ <- logger.debug(s"Submitted job for processing: ${job.id}")
    } yield ()

  def updateOne(logger: Logger[F], collective: Ident, outputDir: Path)(
      itemFile: ItemFile
  ): F[Unit] =
    for {
      textFiles <- itemFile.resolveTextFiles(logger, outputDir)
      pdfFiles <- itemFile.resolvePdfFiles(logger, outputDir)
      previewFiles <- itemFile.resolvePreviewFiles(logger, outputDir)
      attachs <- OptionT
        .whenF(textFiles.nonEmpty || pdfFiles.nonEmpty || previewFiles.nonEmpty)(
          store.transact(RAttachment.findByItem(itemFile.itemId))
        )
        .getOrElse(Vector.empty)
      _ <- textFiles.traverse_ { case (key, file) =>
        withAttach(logger, key, attachs) { ra =>
          setText(collective, ra, file.readText)
        }
      }
      _ <- pdfFiles.traverse_ { case (key, file) =>
        withAttach(logger, key, attachs) { ra =>
          replacePdf(collective, ra, file, previewFiles.forall(_._1 != key))
        }
      }
      _ <- previewFiles.traverse_ { case (key, file) =>
        withAttach(logger, key, attachs) { ra =>
          replacePreview(collective, ra.id, file)
        }
      }
      _ <- submitNewFiles(logger, collective, outputDir)(itemFile)
    } yield ()

  def submitNewFiles(
      logger: Logger[F],
      collective: Ident,
      outputDir: Path
  )(itemFile: ItemFile): F[Unit] =
    for {
      _ <- logger.info(s"Submitting new file for item")
      collLang <- store.transact(RCollective.findLanguage(collective))
      newFiles <- itemFile.resolveNewFiles(logger, outputDir)
      byMeta = newFiles.groupBy(_._1.metadata).view.mapValues(_.map(_._2))
      account = AccountId(collective, DocspellSystem.user)
      _ <- byMeta.toList.traverse_ { case (meta, files) =>
        for {
          uploaded <- files.traverse(file =>
            file.readAll
              .through(
                store.fileRepo.save(
                  collective,
                  FileCategory.AttachmentSource,
                  MimeTypeHint.filename(file)
                )
              )
              .compile
              .lastOrError
              .map(key => file.fileName.toString -> key)
          )
          args = ProcessItemArgs(
            meta.toProcessMeta(collective, itemFile.itemId, collLang, "addon"),
            uploaded.map(f => ProcessItemArgs.File(f._1.some, f._2))
          )
          job <- JobFactory.processItem(args, account, Priority.High, None)
          _ <- jobStore.insert(job.encode)
          _ <- logger.debug(s"Submitted job for processing: ${job.id}")
        } yield ()
      }
    } yield ()

  private def withAttach(logger: Logger[F], key: String, attachs: Vector[RAttachment])(
      run: RAttachment => F[Unit]
  ): F[Unit] =
    OptionT
      .fromOption(
        attachs.find(a => a.id.id == key || key.toIntOption == a.position.some)
      )
      .semiflatMap(run)
      .getOrElseF(logger.warn(s"Cannot find attachment for $key to update text!"))

  private def setText(collective: Ident, ra: RAttachment, readText: F[String]): F[Unit] =
    attachOps.setExtractedText(collective, ra.itemId, ra.id, readText)

  private def replacePdf(
      collective: Ident,
      ra: RAttachment,
      file: Path,
      generatePreview: Boolean
  ): F[Unit] =
    attachOps.addOrReplacePdf(collective, ra.id, file.readAll, generatePreview)

  private def replacePreview(
      collective: Ident,
      attachId: Ident,
      imageData: Path
  ): F[Unit] =
    attachOps.addOrReplacePreview(collective, attachId, imageData.readAll)
}

object AddonPostProcess {

  def apply[F[_]: Sync: Files](
      cmdRunner: BackendCommandRunner[F, Unit],
      store: Store[F],
      attachment: OAttachment[F],
      jobStore: JobStore[F]
  ): AddonPostProcess[F] =
    new AddonPostProcess[F](cmdRunner, store, attachment, jobStore)
}

@@ -0,0 +1,75 @@
/*
 * Copyright 2020 Eike K. & Contributors
 *
 * SPDX-License-Identifier: AGPL-3.0-or-later
 */

package docspell.backend.joex

import cats.data.{Kleisli, OptionT}
import cats.effect._
import cats.syntax.all._

import docspell.addons.Middleware
import docspell.backend.auth.AuthToken
import docspell.backend.joex.AddonOps.AddonRunConfigRef
import docspell.common._
import docspell.logging.Logger
import docspell.store.Store
import docspell.store.records.{RNode, RUser}

import scodec.bits.ByteVector

private[joex] class AddonPrepare[F[_]: Sync](store: Store[F]) extends LoggerExtension {

  def logResult(logger: Logger[F], ref: AddonRunConfigRef): Middleware[F] =
    Middleware(_.mapF(_.attempt.flatTap {
      case Right(_) => ().pure[F]
      case Left(ex) =>
        logger
          .withRunConfig(ref)
          .warn(ex)(s"Addon task '${ref.id.id}' has failed")
    }.rethrow))

  /** Creates environment variables for dsc to connect to the docspell server for the
    * given run config.
    */
  def createDscEnv(
      runConfigRef: AddonRunConfigRef,
      tokenValidity: Duration
  ): F[Middleware[F]] =
    (for {
      userId <- OptionT.fromOption[F](runConfigRef.userId)
      user <- OptionT(store.transact(RUser.getIdByIdOrLogin(userId)))
      account = AccountId(runConfigRef.collective, user.login)
      env =
        Middleware.prepare[F](
          Kleisli(input => makeDscEnv(account, tokenValidity).map(input.addEnv))
        )
    } yield env).getOrElse(Middleware.identity[F])

  /** Creates environment variables to have dsc automatically connect as the given user.
    * Additionally a random rest-server is looked up from the database to set its url.
    */
  def makeDscEnv(
      accountId: AccountId,
      tokenValidity: Duration
  ): F[Map[String, String]] =
    for {
      serverNode <- store.transact(
        RNode
          .findAll(NodeType.Restserver)
          .map(_.sortBy(_.updated).lastOption)
      )
      url = serverNode.map(_.url).map(u => "DSC_DOCSPELL_URL" -> u.asString)
      secret = serverNode.flatMap(_.serverSecret)

      token <- AuthToken.user(
        accountId,
        false,
        secret.getOrElse(ByteVector.empty),
        tokenValidity.some
      )
      session = ("DSC_SESSION" -> token.asString).some
    } yield List(url, session).flatten.toMap
}

@@ -0,0 +1,18 @@
/*
 * Copyright 2020 Eike K. & Contributors
 *
 * SPDX-License-Identifier: AGPL-3.0-or-later
 */

package docspell.backend.joex

import docspell.backend.joex.AddonOps.AddonRunConfigRef
import docspell.logging.Logger

trait LoggerExtension {

  implicit final class LoggerDataOps[F[_]](self: Logger[F]) {
    def withRunConfig(t: AddonRunConfigRef): Logger[F] =
      self.capture("addon-task-id", t.id)
  }
}

@@ -0,0 +1,48 @@
/*
 * Copyright 2020 Eike K. & Contributors
 *
 * SPDX-License-Identifier: AGPL-3.0-or-later
 */

package docspell.backend.ops

import cats.data.NonEmptyList

import docspell.addons.{AddonArchive, AddonMeta, AddonTriggerType}

sealed trait AddonRunConfigError {
  final def cast: AddonRunConfigError = this

  def toLeft[A]: Either[AddonRunConfigError, A] = Left(this)

  def message: String
}

object AddonRunConfigError {

  case object MissingSchedule extends AddonRunConfigError {
    val message =
      "The run config has a trigger 'scheduled' but doesn't provide a schedule!"
  }

  case object ObsoleteSchedule extends AddonRunConfigError {
    val message = "The run config has a schedule, but not a trigger 'Scheduled'."
  }

  case class MismatchingTrigger(unsupported: NonEmptyList[(String, AddonTriggerType)])
      extends AddonRunConfigError {
    def message: String = {
      val list =
        unsupported.map { case (name, tt) => s"$name: ${tt.name}" }.toList.mkString(", ")
      s"Some listed addons don't support all defined triggers: $list"
    }
  }

  object MismatchingTrigger {
    def apply(addon: AddonMeta, tt: AddonTriggerType): MismatchingTrigger =
      MismatchingTrigger(NonEmptyList.of(addon.nameAndVersion -> tt))

    def apply(addon: AddonArchive, tt: AddonTriggerType): MismatchingTrigger =
      MismatchingTrigger(NonEmptyList.of(addon.nameAndVersion -> tt))
  }
}

@@ -0,0 +1,54 @@
/*
 * Copyright 2020 Eike K. & Contributors
 *
 * SPDX-License-Identifier: AGPL-3.0-or-later
 */

package docspell.backend.ops

import cats.data.NonEmptyList
import cats.effect._
import cats.syntax.all._

import docspell.backend.ops.AddonRunConfigError._
import docspell.backend.ops.OAddons.{AddonRunConfigResult, AddonRunInsert}
import docspell.common.Ident
import docspell.store.Store
import docspell.store.records.RAddonArchive

object AddonRunConfigValidate {

  def apply[F[_]: Sync](store: Store[F], cid: Ident)(
      cfg: AddonRunInsert
  ): F[AddonRunConfigResult[AddonRunInsert]] = {
    val init: AddonRunConfigResult[Unit] = ().asRight

    List(
      checkScheduled(cfg).pure[F],
      checkTriggers(store, cid)(cfg)
    )
      .foldLeftM(init)((res, fr) => fr.map(r => res.flatMap(_ => r)))
      .map(_.as(cfg))
  }

  def checkTriggers[F[_]: Sync](store: Store[F], cid: Ident)(
      cfg: AddonRunInsert
  ): F[AddonRunConfigResult[Unit]] =
    for {
      addons <- store.transact(RAddonArchive.findByIds(cid, cfg.addons.map(_.addonId)))
      given = cfg.triggered.toList.toSet
      res = addons
        .flatMap(r => given.diff(r.triggers).map(tt => r.nameAndVersion -> tt))

      maybeError = NonEmptyList
        .fromList(res)
        .map(nel => MismatchingTrigger(nel))
    } yield maybeError.map(_.toLeft).getOrElse(Right(()))

  def checkScheduled(cfg: AddonRunInsert): AddonRunConfigResult[Unit] =
    (cfg.isScheduled, cfg.schedule) match {
      case (true, None) => MissingSchedule.toLeft[Unit]
      case (false, Some(_)) => ObsoleteSchedule.toLeft[Unit]
      case _ => ().asRight
    }
}

@@ -0,0 +1,156 @@
/*
 * Copyright 2020 Eike K. & Contributors
 *
 * SPDX-License-Identifier: AGPL-3.0-or-later
 */

package docspell.backend.ops

import cats.data.EitherT
import cats.effect._
import cats.syntax.all._
import fs2.Stream
import fs2.io.file.Path

import docspell.addons.{AddonMeta, RunnerType}
import docspell.backend.Config
import docspell.backend.ops.AddonValidationError._
import docspell.backend.ops.OAddons.AddonValidationResult
import docspell.common.{Ident, LenientUri, UrlReader}
import docspell.joexapi.model.AddonSupport
import docspell.store.Store
import docspell.store.records.RAddonArchive

final class AddonValidate[F[_]: Async](
    cfg: Config.Addons,
    store: Store[F],
    joexOps: OJoex[F]
) {
  private[this] val logger = docspell.logging.getLogger[F]

  def fromUrl(
      collective: Ident,
      url: LenientUri,
      reader: UrlReader[F],
      localUrl: Option[LenientUri] = None,
      checkExisting: Boolean = true
  ): F[AddonValidationResult[AddonMeta]] =
    if (!cfg.enabled) AddonsDisabled.resultF
    else if (cfg.isDenied(url)) UrlUntrusted(url).resultF
    else if (checkExisting)
      store.transact(RAddonArchive.findByUrl(collective, url)).flatMap {
        case Some(ar) =>
          AddonExists("An addon with this url already exists!", ar).resultF
        case None =>
          archive(collective, reader(localUrl.getOrElse(url)).asRight, checkExisting)
      }
    else archive(collective, reader(localUrl.getOrElse(url)).asRight, checkExisting)

  def archive(
      collective: Ident,
      addonData: Either[Path, Stream[F, Byte]],
      checkExisting: Boolean = true
  ): F[AddonValidationResult[AddonMeta]] =
    (for {
      _ <- EitherT.cond[F](cfg.enabled, (), AddonsDisabled.cast)

      meta <-
        EitherT(
          addonData
            .fold(
              AddonMeta.findInDirectory[F],
              AddonMeta.findInZip[F]
            )
            .attempt
        )
          .leftMap(ex => NotAnAddon(ex).cast)
      _ <- EitherT.cond(
        meta.triggers.exists(_.nonEmpty),
        (),
        InvalidAddon(
          "The addon doesn't define any triggers. At least one is required!"
        ).cast
      )
      _ <- EitherT.cond(
        meta.options.exists(_.isUseful),
        (),
        InvalidAddon(
          "Addon defines no output and no networking. It can't do anything useful."
        ).cast
      )
      _ <- EitherT.cond(cfg.allowImpure || meta.isPure, (), ImpureAddonsDisabled.cast)

      _ <-
        if (checkExisting)
          EitherT(
            store
              .transact(
                RAddonArchive
                  .findByNameAndVersion(collective, meta.meta.name, meta.meta.version)
              )
              .map {
                case Some(ar) => AddonExists(ar).result
                case None => rightUnit
              }
          )
        else rightUnitT

      joexSupport <- EitherT.liftF(joexOps.getAddonSupport)
      addonRunners <- EitherT.liftF(meta.enabledTypes(addonData))
      _ <- EitherT.liftF(
        logger.info(
          s"Comparing joex support vs addon runner: $joexSupport vs. $addonRunners"
        )
      )
      _ <- EitherT.fromEither(validateJoexSupport(addonRunners, joexSupport))

    } yield meta).value

  private def validateJoexSupport(
      addonRunnerTypes: List[RunnerType],
      joexSupport: List[AddonSupport]
  ): AddonValidationResult[Unit] = {
    val addonRunners = addonRunnerTypes.mkString(", ")
    for {
      _ <- Either.cond(
        joexSupport.nonEmpty,
        (),
        AddonUnsupported("There are no joex nodes that have addons enabled!", Nil).cast
      )
      _ <- Either.cond(
        addonRunners.nonEmpty,
        (),
        InvalidAddon("The addon doesn't enable any runner.")
      )

      ids = joexSupport
        .map(n => n.nodeId -> n.runners.intersect(addonRunnerTypes).toSet)

      unsupportedJoex = ids.filter(_._2.isEmpty).map(_._1)

      _ <- Either.cond(
        ids.forall(_._2.nonEmpty),
        (),
        AddonUnsupported(
          s"A joex node doesn't support this addon's runners: $addonRunners. " +
            s"Check: ${unsupportedJoex.map(_.id).mkString(", ")}.",
          unsupportedJoex
        ).cast
      )
    } yield ()
  }

  private def rightUnit: AddonValidationResult[Unit] =
    ().asRight[AddonValidationError]

  private def rightUnitT: EitherT[F, AddonValidationError, Unit] =
    EitherT.fromEither(rightUnit)

  implicit final class ErrorOps(self: AddonValidationError) {
    def result: AddonValidationResult[AddonMeta] =
      self.toLeft

    def resultF: F[AddonValidationResult[AddonMeta]] =
      result.pure[F]
  }
}

@@ -0,0 +1,85 @@
/*
 * Copyright 2020 Eike K. & Contributors
 *
 * SPDX-License-Identifier: AGPL-3.0-or-later
 */

package docspell.backend.ops

import docspell.common.{Ident, LenientUri}
import docspell.store.records.RAddonArchive

import io.circe.generic.extras.Configuration
import io.circe.generic.extras.semiauto.{deriveConfiguredDecoder, deriveConfiguredEncoder}
import io.circe.generic.semiauto.{deriveDecoder, deriveEncoder}
import io.circe.{Decoder, Encoder}

sealed trait AddonValidationError {
  def cast: AddonValidationError = this

  def toLeft[A]: Either[AddonValidationError, A] = Left(this)
}

object AddonValidationError {

  implicit private val throwableDecoder: Decoder[Throwable] =
    Decoder.decodeString.map(new Exception(_))
  implicit private val throwableEncoder: Encoder[Throwable] =
    Encoder.encodeString.contramap(_.getMessage)

  case object AddonsDisabled extends AddonValidationError {}

  case class UrlUntrusted(url: LenientUri) extends AddonValidationError
  object UrlUntrusted {
    implicit val jsonDecoder: Decoder[UrlUntrusted] = deriveDecoder
    implicit val jsonEncoder: Encoder[UrlUntrusted] = deriveEncoder
  }

  case class NotAnAddon(error: Throwable) extends AddonValidationError
  object NotAnAddon {
    implicit val jsonDecoder: Decoder[NotAnAddon] = deriveDecoder
    implicit val jsonEncoder: Encoder[NotAnAddon] = deriveEncoder
  }

  case class AddonUnsupported(message: String, affectedNodes: List[Ident])
      extends AddonValidationError
  object AddonUnsupported {
    implicit val jsonDecoder: Decoder[AddonUnsupported] = deriveDecoder
    implicit val jsonEncoder: Encoder[AddonUnsupported] = deriveEncoder
  }

  case class InvalidAddon(message: String) extends AddonValidationError
  object InvalidAddon {
    implicit val jsonDecoder: Decoder[InvalidAddon] = deriveDecoder
    implicit val jsonEncoder: Encoder[InvalidAddon] = deriveEncoder
  }

  case class AddonExists(message: String, addon: RAddonArchive)
      extends AddonValidationError
  object AddonExists {
    def apply(addon: RAddonArchive): AddonExists =
      AddonExists(s"An addon '${addon.name}/${addon.version}' already exists!", addon)

    implicit val jsonDecoder: Decoder[AddonExists] = deriveDecoder
    implicit val jsonEncoder: Encoder[AddonExists] = deriveEncoder
  }

  case object AddonNotFound extends AddonValidationError

  case class DownloadFailed(error: Throwable) extends AddonValidationError
  object DownloadFailed {
    implicit val jsonDecoder: Decoder[DownloadFailed] = deriveDecoder
    implicit val jsonEncoder: Encoder[DownloadFailed] = deriveEncoder
  }

  case object ImpureAddonsDisabled extends AddonValidationError

  case object RefreshLocalAddon extends AddonValidationError

  implicit val jsonConfig: Configuration =
    Configuration.default.withKebabCaseConstructorNames
      .withDiscriminator("errorType")

  implicit val jsonDecoder: Decoder[AddonValidationError] = deriveConfiguredDecoder
  implicit val jsonEncoder: Encoder[AddonValidationError] = deriveConfiguredEncoder
}

@@ -0,0 +1,426 @@
/*
 * Copyright 2020 Eike K. & Contributors
 *
 * SPDX-License-Identifier: AGPL-3.0-or-later
 */

package docspell.backend.ops

import cats.data.{EitherT, NonEmptyList, OptionT}
import cats.effect._
import cats.syntax.all._

import docspell.addons.{AddonMeta, AddonTriggerType}
import docspell.backend.ops.AddonValidationError._
import docspell.backend.ops.OAddons._
import docspell.backend.{Config, JobFactory}
import docspell.common._
import docspell.logging.Logger
import docspell.scheduler.JobStore
import docspell.scheduler.usertask.{UserTask, UserTaskScope, UserTaskStore}
import docspell.store.Store
import docspell.store.file.FileUrlReader
import docspell.store.records._

import com.github.eikek.calev.CalEvent

trait OAddons[F[_]] {

  /** Registers a new addon. An error is returned if an addon with this url already
    * exists.
    */
  def registerAddon(
      collective: Ident,
      url: LenientUri,
      logger: Option[Logger[F]]
  ): F[AddonValidationResult[(RAddonArchive, AddonMeta)]]

  /** Refreshes an existing addon by downloading it again and updating metadata. */
  def refreshAddon(
      collective: Ident,
      addonId: Ident
  ): F[AddonValidationResult[(RAddonArchive, AddonMeta)]]

  /** Look into the addon at the given url and return its metadata. */
  def inspectAddon(
      collective: Ident,
      url: LenientUri
  ): F[AddonValidationResult[AddonMeta]]

  /** Deletes the addon if it exists. */
  def deleteAddon(collective: Ident, addonId: Ident): F[Boolean]

  def getAllAddons(collective: Ident): F[List[RAddonArchive]]

  /** Inserts or updates the addon run configuration. If it already exists (and the given
    * id is non empty), it will be completely replaced with the given one.
    */
  def upsertAddonRunConfig(
      collective: Ident,
      runConfig: AddonRunInsert
  ): F[AddonRunConfigResult[Ident]]

  /** Deletes this task from the database. */
  def deleteAddonRunConfig(collective: Ident, runConfigId: Ident): F[Boolean]

  def getAllAddonRunConfigs(collective: Ident): F[List[AddonRunInfo]]

  def runAddonForItem(
      account: AccountId,
      itemIds: NonEmptyList[Ident],
      addonRunConfigIds: Set[Ident]
  ): F[Unit]
}

object OAddons {
  val scheduledAddonTaskName: Ident =
    ScheduledAddonTaskArgs.taskName

  case class AddonRunInsert(
      id: Ident,
      name: String,
      enabled: Boolean,
      userId: Option[Ident],
      schedule: Option[CalEvent],
      triggered: NonEmptyList[AddonTriggerType],
      addons: NonEmptyList[AddonArgs]
  ) {

    def isScheduled: Boolean =
      triggered.exists(_ == AddonTriggerType.Scheduled)
  }
  case class AddonArgs(addonId: Ident, args: String)

  case class AddonRunInfo(
      id: Ident,
      name: String,
      enabled: Boolean,
      userId: Option[Ident],
      schedule: Option[CalEvent],
      triggered: List[AddonTriggerType],
      addons: List[(RAddonArchive, RAddonRunConfigAddon)]
  )
  object AddonRunInfo {
    def fromRunConfigData(
        timer: Option[CalEvent],
        addons: List[(RAddonArchive, RAddonRunConfigAddon)]
    )(t: AddonRunConfigData): AddonRunInfo =
      AddonRunInfo(
        id = t.runConfig.id,
        name = t.runConfig.name,
        enabled = t.runConfig.enabled,
        userId = t.runConfig.userId,
        schedule = timer,
        triggered = t.triggers.map(_.trigger),
        addons = addons
      )
  }

  type AddonRunConfigResult[A] = Either[AddonRunConfigError, A]
  object AddonRunConfigResult {
    def success[A](value: A): AddonRunConfigResult[A] = Right(value)
    def failure[A](error: AddonRunConfigError): AddonRunConfigResult[A] = error.toLeft[A]
  }

  type AddonValidationResult[A] = Either[AddonValidationError, A]
  object AddonValidationResult {
    def success[A](value: A): AddonValidationResult[A] = Right(value)
    def failure[A](error: AddonValidationError): AddonValidationResult[A] = Left(error)
  }

  def apply[F[_]: Async](
      cfg: Config.Addons,
      store: Store[F],
      userTasks: UserTaskStore[F],
      jobStore: JobStore[F],
      joex: OJoex[F]
  ): OAddons[F] =
    new OAddons[F] {
      private[this] val logger = docspell.logging.getLogger[F]
      private val urlReader = FileUrlReader(store.fileRepo)
      private val zip = MimeType.zip.asString
      private val addonValidate = new AddonValidate[F](cfg, store, joex)

      def getAllAddonRunConfigs(collective: Ident): F[List[AddonRunInfo]] =
        for {
          all <- store.transact(AddonRunConfigData.findAll(collective))
          runConfigIDs = all.map(_.runConfig.id).toSet
          archiveIds = all.flatMap(_.addons.map(_.addonId)).distinct
          archives <- NonEmptyList
            .fromList(archiveIds)
            .fold(List.empty[RAddonArchive].pure[F])(ids =>
              store.transact(RAddonArchive.findByIds(collective, ids))
            )
          archivesMap = archives.groupBy(_.id)
          ptask <- userTasks
            .getAll(UserTaskScope.collective(collective))
            .filter(ut => runConfigIDs.contains(ut.id))
            .map(ut => ut.id -> ut)
            .compile
            .toList
            .map(_.toMap)
          result = all.map { t =>
            AddonRunInfo.fromRunConfigData(
              ptask.get(t.runConfig.id).map(_.timer),
              t.addons.map(raa => (archivesMap(raa.addonId).head, raa))
            )(t)
          }
        } yield result

      def upsertAddonRunConfig(
          collective: Ident,
          runConfig: AddonRunInsert
      ): F[AddonRunConfigResult[Ident]] = {
        val insertDataRaw = AddonRunConfigData(
          RAddonRunConfig(
            runConfig.id,
            collective,
            runConfig.userId,
            runConfig.name,
            runConfig.enabled,
            Timestamp.Epoch
          ),
          runConfig.addons.zipWithIndex.map { case (a, index) =>
            RAddonRunConfigAddon(Ident.unsafe(""), runConfig.id, a.addonId, a.args, index)
          }.toList,
          runConfig.triggered
            .map(t => RAddonRunConfigTrigger(Ident.unsafe(""), runConfig.id, t))
            .toList
        )

        val upsert = for {
          userId <-
            OptionT
              .fromOption(runConfig.userId)
              .flatMapF(uid => store.transact(RUser.getIdByIdOrLogin(uid)))
              .map(_.uid)
              .value
          insertData =
            insertDataRaw.copy(runConfig =
              insertDataRaw.runConfig.copy(userId = userId.orElse(runConfig.userId))
            )
          id <-
            OptionT(store.transact(RAddonRunConfig.findById(collective, runConfig.id)))
              .map(rt =>
                AddonRunConfigData(
                  rt.copy(
                    userId = insertData.runConfig.userId,
                    name = insertData.runConfig.name,
                    enabled = insertData.runConfig.enabled
                  ),
                  insertData.addons,
                  insertData.triggers
                )
              )
              .semiflatMap(rt =>
                store.transact(AddonRunConfigData.update(rt).as(rt.runConfig.id))
              )
              .getOrElseF(store.transact(AddonRunConfigData.insert(insertData)))
        } yield id

        EitherT(AddonRunConfigValidate(store, collective)(runConfig))
          .semiflatMap(_ =>
            upsert.flatTap { runConfigId =>
              runConfig.schedule match {
                case Some(timer) =>
                  userTasks.updateTask(
                    UserTaskScope.collective(collective),
                    s"Addon task ${runConfig.name}".some,
                    UserTask(
                      runConfigId,
                      scheduledAddonTaskName,
                      true,
                      timer,
                      s"Running scheduled addon task ${runConfig.name}".some,
                      ScheduledAddonTaskArgs(collective, runConfigId)
                    )
                  )
                case None =>
                  userTasks.deleteTask(UserTaskScope.collective(collective), runConfigId)
              }
            }
          )
          .value
      }

      def deleteAddonRunConfig(collective: Ident, runConfigId: Ident): F[Boolean] = {
        val deleteRunConfig =
          (for {
            e <- OptionT(RAddonRunConfig.findById(collective, runConfigId))
            _ <- OptionT.liftF(RAddonRunConfigAddon.deleteAllForConfig(e.id))
            _ <- OptionT.liftF(RAddonRunConfigTrigger.deleteAllForConfig(e.id))
            _ <- OptionT.liftF(RAddonRunConfig.deleteById(collective, e.id))
          } yield true).getOrElse(false)

        for {
          deleted <- store.transact(deleteRunConfig)
          _ <-
            if (deleted)
              userTasks.deleteTask(UserTaskScope.collective(collective), runConfigId)
            else 0.pure[F]
        } yield deleted
      }

      def getAllAddons(collective: Ident): F[List[RAddonArchive]] =
        store.transact(RAddonArchive.listAll(collective))

      def deleteAddon(collective: Ident, addonId: Ident): F[Boolean] =
        store.transact(RAddonArchive.deleteById(collective, addonId)).map(_ > 0)

      def inspectAddon(
          collective: Ident,
          url: LenientUri
      ): F[AddonValidationResult[AddonMeta]] =
        addonValidate.fromUrl(collective, url, urlReader, checkExisting = false)

      def registerAddon(
          collective: Ident,
          url: LenientUri,
          logger: Option[Logger[F]]
      ): F[AddonValidationResult[(RAddonArchive, AddonMeta)]] = {
        val log = logger.getOrElse(this.logger)
        def validateAndInsert(file: FileKey, localUrl: LenientUri) =
          addonValidate.fromUrl(collective, url, urlReader, localUrl.some).flatMap {
            case Right(meta) =>
              insertAddon(collective, url, meta, file)
                .map(ar => AddonValidationResult.success(ar -> meta))

            case Left(error) =>
              store.fileRepo
                .delete(file)
                .as(AddonValidationResult.failure[(RAddonArchive, AddonMeta)](error))
          }

        log.info(s"Store addon file from '${url.asString} for ${collective.id}") *>
          storeAddonFromUrl(collective, url).flatMapF { file =>
            val localUrl = FileUrlReader.url(file)
            for {
              _ <- log.info(s"Validating addon…")
              res <- validateAndInsert(file, localUrl)
              _ <- log.info(s"Validation result: $res")
            } yield res
          }.value
      }

      def refreshAddon(
          collective: Ident,
          addonId: Ident
      ): F[AddonValidationResult[(RAddonArchive, AddonMeta)]] = {
        val findAddon = store
          .transact(RAddonArchive.findById(collective, addonId))
          .map(_.toRight(AddonNotFound))
        def validateAddon(aa: RAddonArchive): F[AddonValidationResult[AddonMeta]] =
          aa.originalUrl.fold(
            AddonValidationResult.failure[AddonMeta](RefreshLocalAddon).pure[F]
          )(url =>
            addonValidate.fromUrl(collective, url, urlReader, checkExisting = false)
          )

        EitherT(findAddon).flatMap { aa =>
          EitherT(validateAddon(aa))
            .flatMap(meta => refreshAddon(aa, meta).map(na => na -> meta))
        }.value
      }

      private def refreshAddon(
          r: RAddonArchive,
          meta: AddonMeta
      ): EitherT[F, AddonValidationError, RAddonArchive] =
        if (r.isUnchanged(meta)) EitherT.pure(r)
        else
          r.originalUrl match {
            case Some(url) =>
              EitherT(
                store
                  .transact(
                    RAddonArchive
                      .findByNameAndVersion(r.cid, meta.meta.name, meta.meta.version)
                  )
                  .map(
                    _.fold(().asRight[AddonValidationError])(rx => AddonExists(rx).toLeft)
                  )
              ).flatMap(_ =>
                storeAddonFromUrl(r.cid, url).flatMap { file =>
                  val nr = r.update(file, meta)
                  for {
                    _ <- EitherT(
                      store
                        .transact(RAddonArchive.update(nr))
                        .map(_.asRight[AddonValidationError])
                        .recoverWith { case ex =>
                          logger.warn(ex)(s"Storing addon metadata failed.") *>
                            store.fileRepo
                              .delete(file)
                              .as(
                                AddonExists(
                                  s"The addon '${nr.name}/${nr.version}' could not be stored",
                                  nr
                                ).toLeft
                              )
                        }
                    )
                    _ <- EitherT.liftF(store.fileRepo.delete(r.fileId))
                  } yield nr
                }
              )
            case None =>
              EitherT.leftT(RefreshLocalAddon.cast)
          }

      private def insertAddon(
          collective: Ident,
          url: LenientUri,
          meta: AddonMeta,
          file: FileKey
      ): F[RAddonArchive] =
        for {
          now <- Timestamp.current[F]
          aId <- Ident.randomId[F]
          record = RAddonArchive(
            aId,
            collective,
            file,
            url.some,
            meta,
            now
          )
          _ <- store
            .transact(RAddonArchive.insert(record, silent = false))
            .onError(_ => store.fileRepo.delete(file))
        } yield record

      private def storeAddonFromUrl(collective: Ident, url: LenientUri) =
        for {
          urlFile <- EitherT.pure(url.path.segments.lastOption)
          file <- EitherT(
            urlReader(url)
              .through(
                store.fileRepo.save(
                  collective,
                  FileCategory.Addon,
                  MimeTypeHint(urlFile, zip.some)
                )
              )
              .compile
              .lastOrError
              .attempt
              .map(_.leftMap(DownloadFailed(_).cast))
          )
        } yield file

      def runAddonForItem(
          account: AccountId,
          itemIds: NonEmptyList[Ident],
          addonRunConfigIds: Set[Ident]
      ): F[Unit] =
        for {
          jobs <- itemIds.traverse(id =>
            JobFactory.existingItemAddon(
              ItemAddonTaskArgs(account.collective, id, addonRunConfigIds),
              account
            )
          )
          _ <- jobStore.insertAllIfNew(jobs.map(_.encode).toList)
        } yield ()
    }
}

@ -0,0 +1,223 @@
|
||||
/*
|
||||
* Copyright 2020 Eike K. & Contributors
|
||||
*
|
||||
* SPDX-License-Identifier: AGPL-3.0-or-later
|
||||
*/
|
||||
|
||||
package docspell.backend.ops
|
||||
|
||||
import cats.data.{NonEmptyList => Nel, OptionT}
|
||||
import cats.effect._
|
||||
import cats.syntax.all._
|
||||
import fs2.Stream
|
||||
|
||||
import docspell.backend.JobFactory
|
||||
import docspell.common.MakePreviewArgs.StoreMode
|
||||
import docspell.common._
|
||||
import docspell.files.TikaMimetype
|
||||
import docspell.ftsclient.{FtsClient, TextData}
|
||||
import docspell.scheduler.JobStore
|
||||
import docspell.store.Store
|
||||
import docspell.store.queries.QAttachment
|
||||
import docspell.store.records._
|
||||
|
||||
trait OAttachment[F[_]] {
|
||||
|
||||
def setExtractedText(
|
||||
collective: Ident,
|
||||
itemId: Ident,
|
||||
attachId: Ident,
|
||||
newText: F[String]
|
||||
): F[Unit]
|
||||
|
||||
def addOrReplacePdf(
|
||||
collective: Ident,
|
||||
attachId: Ident,
|
||||
pdfData: Stream[F, Byte],
|
||||
regeneratePreview: Boolean
|
||||
): F[Unit]
|
||||
|
||||
def addOrReplacePreview(
|
||||
collective: Ident,
|
||||
attachId: Ident,
|
||||
imageData: Stream[F, Byte]
|
||||
): F[Unit]
|
||||
}
|
||||
|
||||
object OAttachment {

  def apply[F[_]: Sync](
      store: Store[F],
      fts: FtsClient[F],
      jobStore: JobStore[F]
  ): OAttachment[F] =
    new OAttachment[F] {
      private[this] val logger = docspell.logging.getLogger[F]

      def setExtractedText(
          collective: Ident,
          itemId: Ident,
          attachId: Ident,
          newText: F[String]
      ): F[Unit] =
        for {
          _ <- logger.info(s"Find attachment ${attachId.id} to update extracted text.")
          cca <- store
            .transact(
              QAttachment
                .allAttachmentMetaAndName(
                  collective.some,
                  Nel.of(itemId).some,
                  ItemState.validStates.append(ItemState.Processing),
                  100
                )
            )
            .filter(_.id == attachId)
            .compile
            .last
          content = cca.find(_.id == attachId)
          _ <- logger.debug(s"Found existing metadata: ${content.isDefined}")
          _ <- OptionT
            .fromOption(content)
            .semiflatMap { cnt =>
              for {
                _ <- logger.debug(s"Setting new extracted text on ${cnt.id.id}")
                text <- newText
                td = TextData.attachment(
                  cnt.item,
                  cnt.id,
                  cnt.collective,
                  cnt.folder,
                  cnt.lang,
                  cnt.name,
                  text.some
                )
                _ <- store.transact(RAttachmentMeta.updateContent(attachId, text))
                _ <- fts.updateIndex(logger, td)
              } yield ()
            }
            .getOrElseF(
              logger.warn(
                s"Item or attachment meta not found to update text: ${itemId.id}"
              )
            )
        } yield ()

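Worth noting: `newText` is an `F[String]` rather than a `String`, so reading an addon's output can be deferred until the attachment metadata has actually been found. A hedged usage sketch, assuming cats-effect `IO` and an `attachment: OAttachment[IO]`; the ids and file name are placeholders:

    // sketch only: store addon-produced text and refresh the fts index with it
    val update: IO[Unit] =
      attachment.setExtractedText(
        Ident.unsafe("my-collective"),
        Ident.unsafe("item-1"),
        Ident.unsafe("attach-1"),
        IO.blocking(scala.io.Source.fromFile("addon-output.txt").mkString)
      )
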
      def addOrReplacePdf(
          collective: Ident,
          attachId: Ident,
          pdfData: Stream[F, Byte],
          regeneratePreview: Boolean
      ): F[Unit] = {
        def generatePreview(ra: RAttachment): F[Unit] =
          JobFactory
            .makePreview(MakePreviewArgs(ra.id, StoreMode.Replace), None)
            .map(_.encode)
            .flatMap(jobStore.insert) *>
            logger.info(s"Job submitted to re-generate preview from new pdf")

        def generatePageCount(ra: RAttachment): F[Unit] =
          JobFactory
            .makePageCount(
              MakePageCountArgs(ra.id),
              AccountId(collective, DocspellSystem.user).some
            )
            .map(_.encode)
            .flatMap(jobStore.insert) *>
            logger.info(s"Job submitted to find page count from new pdf")

        def setFile(ra: RAttachment, rs: RAttachmentSource) =
          for {
            _ <- requireMimeType(pdfData, MimeType.pdf)

            newFile <- pdfData
              .through(
                store.fileRepo.save(
                  collective,
                  FileCategory.AttachmentConvert,
                  MimeTypeHint.advertised(MimeType.pdf)
                )
              )
              .compile
              .lastOrError

            _ <- store.transact(RAttachment.updateFileId(attachId, newFile))
            _ <- logger.info(s"Deleting old file for attachment")
            _ <-
              if (rs.fileId == ra.fileId) ().pure[F]
              else store.fileRepo.delete(ra.fileId)
            _ <-
              if (regeneratePreview) generatePreview(ra)
              else ().pure[F]
            _ <- generatePageCount(ra)
          } yield ()

        (for {
          ra <- OptionT(
            store.transact(RAttachment.findByIdAndCollective(attachId, collective))
          )
          rs <- OptionT(
            store.transact(RAttachmentSource.findByIdAndCollective(attachId, collective))
          )
          _ <- OptionT.liftF(setFile(ra, rs))
        } yield ()).getOrElseF(
          logger.warn(
            s"Cannot replace pdf file. Attachment not found for id: ${attachId.id}"
          )
        )
      }

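Note the guard in `setFile`: the previous converted file is deleted only when it differs from the source file (`rs.fileId != ra.fileId`), so replacing the pdf never destroys the original upload. A usage sketch, assuming fs2's file API and placeholder names:

    // sketch only: swap in a pdf produced by an addon and request a fresh preview
    val replace: IO[Unit] =
      attachment.addOrReplacePdf(
        Ident.unsafe("my-collective"),
        Ident.unsafe("attach-1"),
        fs2.io.file.Files[IO].readAll(fs2.io.file.Path("converted.pdf")),
        regeneratePreview = true
      )
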
      def addOrReplacePreview(
          collective: Ident,
          attachId: Ident,
          imageData: Stream[F, Byte]
      ): F[Unit] = {
        def setFile(ra: RAttachment): F[Unit] =
          for {
            _ <- requireMimeType(imageData, MimeType.image("*"))
            newFile <- imageData
              .through(
                store.fileRepo
                  .save(collective, FileCategory.PreviewImage, MimeTypeHint.none)
              )
              .compile
              .lastOrError

            now <- Timestamp.current[F]
            record = RAttachmentPreview(ra.id, newFile, None, now)
            oldFile <- store.transact(RAttachmentPreview.upsert(record))
            _ <- OptionT
              .fromOption(oldFile)
              .semiflatMap(store.fileRepo.delete)
              .getOrElse(())
          } yield ()

        (for {
          ra <- OptionT(
            store.transact(RAttachment.findByIdAndCollective(attachId, collective))
          )
          _ <- OptionT.liftF(setFile(ra))
        } yield ()).getOrElseF(
          logger.warn(
            s"Cannot add/replace preview file. Attachment not found for id: ${attachId.id}"
          )
        )
      }
    }

  private def requireMimeType[F[_]: Sync](
      data: Stream[F, Byte],
      expectedMime: MimeType
  ): F[Unit] =
    TikaMimetype
      .detect(data, MimeTypeHint.advertised(expectedMime))
      .flatMap { mime =>
        if (expectedMime.matches(mime)) ().pure[F]
        else
          Sync[F].raiseError(
            new IllegalArgumentException(
              s"Expected ${expectedMime.asString} file, but got: ${mime.asString}"
            )
          )
      }
}

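`requireMimeType` sniffs the actual bytes with Tika instead of trusting the advertised type, which keeps addons from smuggling arbitrary content into the pdf or preview slots (the error message now reports the expected type, since the helper is also used for images). A small sketch of the failure path, assuming cats-effect `IO`:

    // sketch only: plain text offered as pdf is rejected before anything is stored
    val bytes = fs2.Stream.emits("hello".getBytes).covary[IO]
    val check: IO[Unit] =
      requireMimeType[IO](bytes, MimeType.pdf).handleErrorWith(ex =>
        IO.println(s"rejected: ${ex.getMessage}")
      )
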
@ -61,6 +61,12 @@ trait OItem[F[_]] {
      collective: Ident
  ): F[AttachedEvent[UpdateResult]]

  def removeTagsOfCategories(
      item: Ident,
      collective: Ident,
      categories: Set[String]
  ): F[AttachedEvent[UpdateResult]]

  def removeTagsMultipleItems(
      items: Nel[Ident],
      tags: List[String],
@ -80,11 +86,13 @@ trait OItem[F[_]] {
      collective: Ident
  ): F[UpdateResult]

  def setFolder(item: Ident, folder: Option[Ident], collective: Ident): F[UpdateResult]
  /** Set or remove the folder on an item. Folder can be the id or name. */
  def setFolder(item: Ident, folder: Option[String], collective: Ident): F[UpdateResult]

  /** Set or remove the folder on multiple items. Folder can be the id or name. */
  def setFolderMultiple(
      items: Nel[Ident],
      folder: Option[Ident],
      folder: Option[String],
      collective: Ident
  ): F[UpdateResult]

@ -122,6 +130,13 @@ trait OItem[F[_]] {

  def setNotes(item: Ident, notes: Option[String], collective: Ident): F[UpdateResult]

  def addNotes(
      item: Ident,
      notes: String,
      separator: Option[String],
      collective: Ident
  ): F[UpdateResult]

  def setName(item: Ident, name: String, collective: Ident): F[UpdateResult]

  def setNameMultiple(
@ -288,6 +303,28 @@ object OItem {
        }
      }

      def removeTagsOfCategories(
          item: Ident,
          collective: Ident,
          categories: Set[String]
      ): F[AttachedEvent[UpdateResult]] =
        if (categories.isEmpty) {
          AttachedEvent.only(UpdateResult.success).pure[F]
        } else {
          val dbtask =
            for {
              tags <- RTag.findByItem(item)
              removeTags = tags.filter(_.category.exists(categories.contains))
              _ <- RTagItem.removeAllTags(item, removeTags.map(_.tagId))
              mkEvent = Event.TagsChanged
                .partial(Nel.of(item), Nil, removeTags.map(_.tagId.id).toList)
            } yield AttachedEvent(UpdateResult.success)(mkEvent)

          OptionT(store.transact(RItem.checkByIdAndCollective(item, collective)))
            .semiflatMap(_ => store.transact(dbtask))
            .getOrElse(AttachedEvent.only(UpdateResult.notFound))
        }

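This lets an addon clear everything in one tag category (say, a classifier's output) before writing its own result; the emitted `TagsChanged` event carries the removed tag ids. A hedged sketch, assuming an `oitem: OItem[F]` and placeholder ids:

    // sketch only: drop all tags of category "doctype" from one item
    val cleared: F[AttachedEvent[UpdateResult]] =
      oitem.removeTagsOfCategories(
        Ident.unsafe("item-1"),
        Ident.unsafe("my-collective"),
        Set("doctype")
      )
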
      def removeTagsMultipleItems(
          items: Nel[Ident],
          tags: List[String],
@ -420,21 +457,27 @@ object OItem {

      def setFolder(
          item: Ident,
          folder: Option[Ident],
          folder: Option[String],
          collective: Ident
      ): F[UpdateResult] =
        UpdateResult
          .fromUpdate(
            store
              .transact(RItem.updateFolder(item, collective, folder))
        for {
          result <- store.transact(RItem.updateFolder(item, collective, folder)).attempt
          ures = result.fold(
            UpdateResult.failure,
            t => UpdateResult.fromUpdateRows(t._1)
          )
          .flatTap(
            onSuccessIgnoreError(fts.updateFolder(logger, item, collective, folder))
          _ <- result.fold(
            _ => ().pure[F],
            t =>
              onSuccessIgnoreError(fts.updateFolder(logger, item, collective, t._2))(
                ures
              )
          )
        } yield ures

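The folder argument is now an `Option[String]` that may hold a folder id or a folder name; `RItem.updateFolder` returns the update count together with the resolved folder id, and the latter (`t._2`) is what goes into the fulltext index. A sketch with placeholder values:

    // sketch only: move an item into a folder by name; None clears the folder
    val moved: F[UpdateResult] =
      oitem.setFolder(Ident.unsafe("item-1"), Some("invoices"), Ident.unsafe("my-collective"))
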
      def setFolderMultiple(
          items: Nel[Ident],
          folder: Option[Ident],
          folder: Option[String],
          collective: Ident
      ): F[UpdateResult] =
        for {
@ -615,6 +658,33 @@ object OItem {
            }
        )

      def addNotes(
          item: Ident,
          notes: String,
          separator: Option[String],
          collective: Ident
      ): F[UpdateResult] =
        store
          .transact(RItem.appendNotes(item, collective, notes, separator))
          .flatMap {
            case Some(newNotes) =>
              store
                .transact(RCollective.findLanguage(collective))
                .map(_.getOrElse(Language.English))
                .flatMap(lang =>
                  fts.updateItemNotes(logger, item, collective, lang, newNotes.some)
                )
                .attempt
                .flatMap {
                  case Right(()) => ().pure[F]
                  case Left(ex) =>
                    logger.warn(s"Error updating full-text index: ${ex.getMessage}")
                }
                .as(UpdateResult.success)
            case None =>
              UpdateResult.notFound.pure[F]
          }

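`addNotes` appends rather than overwrites, joining old and new notes with the optional separator, then pushes the combined notes to the fulltext index using the collective's language (falling back to English); an index failure is only logged, so the database update still counts as success. A sketch with placeholder ids:

    // sketch only: append addon output to the notes, separated by a blank line
    val noted: F[UpdateResult] =
      oitem.addNotes(
        Ident.unsafe("item-1"),
        "Summary generated by my-addon",
        Some("\n\n"),
        Ident.unsafe("my-collective")
      )
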
      def setName(item: Ident, name: String, collective: Ident): F[UpdateResult] =
        UpdateResult
          .fromUpdate(

@ -6,11 +6,13 @@

package docspell.backend.ops

import cats.Applicative
import cats.effect._
import cats.implicits._
import cats.syntax.all._
import fs2.Stream

import docspell.common.Ident
import docspell.common.{Ident, NodeType}
import docspell.joexapi.client.JoexClient
import docspell.joexapi.model.AddonSupport
import docspell.pubsub.api.PubSubT
import docspell.scheduler.msg.{CancelJob, JobsNotify, PeriodicTaskNotify}

@ -21,10 +23,16 @@ trait OJoex[F[_]] {
  def notifyPeriodicTasks: F[Unit]

  def cancelJob(job: Ident, worker: Ident): F[Unit]

  def getAddonSupport: F[List[AddonSupport]]
}

object OJoex {
  def apply[F[_]: Applicative](pubSub: PubSubT[F]): Resource[F, OJoex[F]] =
  def apply[F[_]: Async](
      pubSub: PubSubT[F],
      nodes: ONode[F],
      joexClient: JoexClient[F]
  ): Resource[F, OJoex[F]] =
    Resource.pure[F, OJoex[F]](new OJoex[F] {

      def notifyAllNodes: F[Unit] =
@ -35,5 +43,17 @@ object OJoex {

      def cancelJob(job: Ident, worker: Ident): F[Unit] =
        pubSub.publish1IgnoreErrors(CancelJob.topic, CancelJob(job, worker)).as(())

      def getAddonSupport: F[List[AddonSupport]] =
        for {
          joex <- nodes.getNodes(NodeType.Joex)
          conc = math.max(2, Runtime.getRuntime.availableProcessors() - 1)
          supp <- Stream
            .emits(joex)
            .covary[F]
            .parEvalMap(conc)(n => joexClient.getAddonSupport(n.url))
            .compile
            .toList
        } yield supp
    })
}

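`getAddonSupport` asks every registered joex node over HTTP which addon runners it supports, with at most `max(2, cores - 1)` requests in flight (3 on a four-core machine; the floor of 2 keeps some overlap even on one core, since the calls are IO-bound). The same pattern in isolation, with made-up urls:

    // sketch only: bounded parallel fan-out with fs2 + cats-effect
    import cats.effect.IO
    import fs2.Stream

    def query(url: String): IO[String] = IO.pure(s"addons supported by $url")

    val nodes = List("http://joex-1:7878", "http://joex-2:7878")
    val conc = math.max(2, Runtime.getRuntime.availableProcessors() - 1)
    val supported: IO[List[String]] =
      Stream.emits(nodes).covary[IO].parEvalMap(conc)(query).compile.toList
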
@ -13,11 +13,27 @@ import docspell.common.{Ident, LenientUri, NodeType}
import docspell.store.Store
import docspell.store.records.RNode

import scodec.bits.ByteVector

trait ONode[F[_]] {

  def register(appId: Ident, nodeType: NodeType, uri: LenientUri): F[Unit]
  def register(
      appId: Ident,
      nodeType: NodeType,
      uri: LenientUri,
      serverSecret: Option[ByteVector]
  ): F[Unit]

  def unregister(appId: Ident): F[Unit]

  def withRegistered(
      appId: Ident,
      nodeType: NodeType,
      uri: LenientUri,
      serverSecret: Option[ByteVector]
  ): Resource[F, Unit]

  def getNodes(nodeType: NodeType): F[Vector[RNode]]
}

object ONode {
@ -25,9 +41,14 @@ object ONode {
  def apply[F[_]: Async](store: Store[F]): Resource[F, ONode[F]] =
    Resource.pure[F, ONode[F]](new ONode[F] {
      val logger = docspell.logging.getLogger[F]
      def register(appId: Ident, nodeType: NodeType, uri: LenientUri): F[Unit] =
      def register(
          appId: Ident,
          nodeType: NodeType,
          uri: LenientUri,
          serverSecret: Option[ByteVector]
      ): F[Unit] =
        for {
          node <- RNode(appId, nodeType, uri)
          node <- RNode(appId, nodeType, uri, serverSecret)
          _ <- logger.info(s"Registering node ${node.id.id}")
          _ <- store.transact(RNode.set(node))
        } yield ()
@ -35,6 +56,19 @@ object ONode {
      def unregister(appId: Ident): F[Unit] =
        logger.info(s"Unregister app ${appId.id}") *>
          store.transact(RNode.delete(appId)).map(_ => ())

      def withRegistered(
          appId: Ident,
          nodeType: NodeType,
          uri: LenientUri,
          serverSecret: Option[ByteVector]
      ): Resource[F, Unit] =
        Resource.make(register(appId, nodeType, uri, serverSecret))(_ =>
          unregister(appId)
        )

      def getNodes(nodeType: NodeType): F[Vector[RNode]] =
        store.transact(RNode.findAll(nodeType))
    })

}
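`withRegistered` ties the registry entry to a `Resource`: `register` runs on acquire and `unregister` on release, so a node disappears from the table even when its process exits through an error. A sketch, assuming an `onode: ONode[F]`; the uri and ids are placeholders:

    // sketch only: the node stays registered exactly as long as the resource is held
    val registered: Resource[F, Unit] =
      onode.withRegistered(
        Ident.unsafe("joex-1"),
        NodeType.Joex,
        LenientUri.unsafe("http://localhost:7878"),
        serverSecret = None
      )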