Mirror of https://github.com/TheAnachronism/docspell.git (synced 2025-04-05 19:09:32 +00:00)

Merge pull request #1550 from eikek/addons-experiment: Addons experiment

Commit 8a86de43de

Changed file: build.sbt (44 lines)
@@ -293,6 +293,15 @@ val openapiScalaSettings = Seq(
             field.copy(typeDef =
               TypeDef("DownloadState", Imports("docspell.common.DownloadState"))
             )
+        case "addon-trigger-type" =>
+          field =>
+            field.copy(typeDef =
+              TypeDef("AddonTriggerType", Imports("docspell.addons.AddonTriggerType"))
+            )
+        case "addon-runner-type" =>
+          field =>
+            field
+              .copy(typeDef = TypeDef("RunnerType", Imports("docspell.addons.RunnerType")))
       })
     )
@@ -325,6 +334,7 @@ val common = project
     libraryDependencies ++=
       Dependencies.fs2 ++
         Dependencies.circe ++
+        Dependencies.circeGenericExtra ++
         Dependencies.calevCore ++
         Dependencies.calevCirce
   )
@@ -351,7 +361,7 @@ val files = project
   .in(file("modules/files"))
   .disablePlugins(RevolverPlugin)
   .settings(sharedSettings)
-  .withTestSettings
+  .withTestSettingsDependsOn(loggingScribe)
   .settings(
     name := "docspell-files",
     libraryDependencies ++=
@@ -448,6 +458,19 @@ val notificationApi = project
   )
   .dependsOn(common, loggingScribe)

+val addonlib = project
+  .in(file("modules/addonlib"))
+  .disablePlugins(RevolverPlugin)
+  .settings(sharedSettings)
+  .withTestSettingsDependsOn(loggingScribe)
+  .settings(
+    libraryDependencies ++=
+      Dependencies.fs2 ++
+        Dependencies.circe ++
+        Dependencies.circeYaml
+  )
+  .dependsOn(common, files, loggingScribe)
+
 val store = project
   .in(file("modules/store"))
   .disablePlugins(RevolverPlugin)
@@ -469,7 +492,16 @@ val store = project
     libraryDependencies ++=
       Dependencies.testContainer.map(_ % Test)
   )
-  .dependsOn(common, query.jvm, totp, files, notificationApi, jsonminiq, loggingScribe)
+  .dependsOn(
+    common,
+    addonlib,
+    query.jvm,
+    totp,
+    files,
+    notificationApi,
+    jsonminiq,
+    loggingScribe
+  )

 val notificationImpl = project
   .in(file("modules/notification/impl"))
@@ -647,7 +679,7 @@ val restapi = project
     openapiSpec := (Compile / resourceDirectory).value / "docspell-openapi.yml",
     openapiStaticGen := OpenApiDocGenerator.Redoc
   )
-  .dependsOn(common, query.jvm, notificationApi, jsonminiq)
+  .dependsOn(common, query.jvm, notificationApi, jsonminiq, addonlib)

 val joexapi = project
   .in(file("modules/joexapi"))
@@ -667,7 +699,7 @@ val joexapi = project
     openapiSpec := (Compile / resourceDirectory).value / "joex-openapi.yml",
     openapiStaticGen := OpenApiDocGenerator.Redoc
   )
-  .dependsOn(common, loggingScribe)
+  .dependsOn(common, loggingScribe, addonlib)

 val backend = project
   .in(file("modules/backend"))
@@ -683,6 +715,7 @@ val backend = project
       Dependencies.emil
   )
   .dependsOn(
+    addonlib,
     store,
     notificationApi,
     joexapi,
@@ -739,7 +772,7 @@ val config = project
       Dependencies.fs2 ++
         Dependencies.pureconfig
   )
-  .dependsOn(common, loggingApi, ftspsql, store)
+  .dependsOn(common, loggingApi, ftspsql, store, addonlib)

 // --- Application(s)

@@ -946,6 +979,7 @@ val root = project
   )
   .aggregate(
     common,
+    addonlib,
     loggingApi,
     loggingScribe,
     config,
@@ -0,0 +1,90 @@ new file: AddonArchive.scala

/*
 * Copyright 2020 Eike K. & Contributors
 *
 * SPDX-License-Identifier: AGPL-3.0-or-later
 */

package docspell.addons

import cats.effect._
import cats.syntax.all._
import fs2.Stream
import fs2.io.file.{Files, Path}

import docspell.common._
import docspell.files.Zip

final case class AddonArchive(url: LenientUri, name: String, version: String) {
  def nameAndVersion: String =
    s"$name-$version"

  def extractTo[F[_]: Async](
      reader: UrlReader[F],
      directory: Path,
      withSubdir: Boolean = true,
      glob: Glob = Glob.all
  ): F[Path] = {
    val logger = docspell.logging.getLogger[F]
    val target =
      if (withSubdir) directory.absolute / nameAndVersion
      else directory.absolute

    Files[F]
      .exists(target)
      .flatMap {
        case true => target.pure[F]
        case false =>
          Files[F].createDirectories(target) *>
            reader(url)
              .through(Zip.unzip(8192, glob))
              .through(Zip.saveTo(logger, target, moveUp = true))
              .compile
              .drain
              .as(target)
      }
  }

  /** Reads the metadata, either from the given directory or by extracting the
    * zip at the url to find the metadata file.
    */
  def readMeta[F[_]: Async](
      urlReader: UrlReader[F],
      directory: Option[Path] = None
  ): F[AddonMeta] =
    directory
      .map(AddonMeta.findInDirectory[F])
      .getOrElse(AddonMeta.findInZip(urlReader(url)))
}

object AddonArchive {
  def read[F[_]: Async](
      url: LenientUri,
      urlReader: UrlReader[F],
      extractDir: Option[Path] = None
  ): F[AddonArchive] = {
    val addon = AddonArchive(url, "", "")
    addon
      .readMeta(urlReader, extractDir)
      .map(m => addon.copy(name = m.meta.name, version = m.meta.version))
  }

  def dockerAndFlakeExists[F[_]: Async](
      archive: Either[Path, Stream[F, Byte]]
  ): F[(Boolean, Boolean)] = {
    val files = Files[F]
    def forPath(path: Path): F[(Boolean, Boolean)] =
      (files.exists(path / "Dockerfile"), files.exists(path / "flake.nix")).tupled

    def forZip(data: Stream[F, Byte]): F[(Boolean, Boolean)] =
      data
        .through(Zip.unzip(8192, Glob("Dockerfile|flake.nix")))
        .collect {
          case bin if bin.name == "Dockerfile" => (true, false)
          case bin if bin.name == "flake.nix"  => (false, true)
        }
        .compile
        .fold((false, false))((r, e) => (r._1 || e._1, r._2 || e._2))

    archive.fold(forPath, forZip)
  }
}
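For reference, a minimal usage sketch (not part of this diff): given a UrlReader[IO], AddonArchive.read scans the zip for the descriptor and fills in name and version. The URL is a placeholder and LenientUri.unsafe is assumed from docspell.common.

// Sketch: resolve name/version of a remote addon zip.
import cats.effect.IO
import docspell.addons.AddonArchive
import docspell.common.{LenientUri, UrlReader}

def describeAddon(reader: UrlReader[IO]): IO[String] = {
  val url = LenientUri.unsafe("https://example.com/my-addon.zip") // hypothetical URL
  // extractDir = None, so the metadata is read from the zip stream directly
  AddonArchive.read[IO](url, reader).map(_.nameAndVersion)
}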
@@ -0,0 +1,33 @@ new file: AddonExecutionResult.scala

/*
 * Copyright 2020 Eike K. & Contributors
 *
 * SPDX-License-Identifier: AGPL-3.0-or-later
 */

package docspell.addons

import cats.Monoid
import cats.syntax.all._

case class AddonExecutionResult(
    addonResults: List[AddonResult],
    pure: Boolean
) {
  def addonResult: AddonResult = addonResults.combineAll
  def isFailure: Boolean = addonResult.isFailure
  def isSuccess: Boolean = addonResult.isSuccess
}

object AddonExecutionResult {
  val empty: AddonExecutionResult =
    AddonExecutionResult(Nil, false)

  def combine(a: AddonExecutionResult, b: AddonExecutionResult): AddonExecutionResult =
    AddonExecutionResult(
      a.addonResults ::: b.addonResults,
      a.pure && b.pure
    )

  implicit val executionResultMonoid: Monoid[AddonExecutionResult] =
    Monoid.instance(empty, combine)
}
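A short sketch (not part of the diff) of how the Monoid behaves: results concatenate, and a combined run only counts as pure when every part was pure.

import cats.syntax.all._
import docspell.addons.{AddonExecutionResult, AddonResult}

val a = AddonExecutionResult(List(AddonResult.empty), pure = true)
val b = AddonExecutionResult(Nil, pure = false)
val total = List(a, b).combineAll
// total.addonResults == a.addonResults, total.pure == false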
@@ -0,0 +1,121 @@ new file: AddonExecutor.scala

/*
 * Copyright 2020 Eike K. & Contributors
 *
 * SPDX-License-Identifier: AGPL-3.0-or-later
 */

package docspell.addons

import cats.data.Kleisli
import cats.effect._
import cats.syntax.all._
import fs2.Stream
import fs2.io.file._

import docspell.common.UrlReader
import docspell.common.exec.Env
import docspell.logging.Logger

trait AddonExecutor[F[_]] {

  def config: AddonExecutorConfig

  def execute(logger: Logger[F]): AddonExec[F]

  def execute(logger: Logger[F], in: InputEnv): F[AddonExecutionResult] =
    execute(logger).run(in)
}

object AddonExecutor {

  def apply[F[_]: Async](
      cfg: AddonExecutorConfig,
      urlReader: UrlReader[F]
  ): AddonExecutor[F] =
    new AddonExecutor[F] with AddonLoggerExtension {
      val config = cfg

      def execute(logger: Logger[F]): AddonExec[F] =
        Kleisli { in =>
          for {
            _ <- logger.info(s"About to run ${in.addons.size} addon(s) in ${in.baseDir}")
            ctx <- prepareDirectory(
              logger,
              in.baseDir,
              in.outputDir,
              in.cacheDir,
              in.addons
            )
            rs <- ctx.traverse(c => runAddon(logger.withAddon(c), in.env)(c))
            pure = ctx.foldl(true)((b, c) => b && c.meta.isPure)
          } yield AddonExecutionResult(rs, pure)
        }

      private def prepareDirectory(
          logger: Logger[F],
          baseDir: Path,
          outDir: Path,
          cacheDir: Path,
          addons: List[AddonRef]
      ): F[List[Context]] =
        for {
          addonsDir <- Directory.create(baseDir / "addons")
          _ <- Directory.createAll(Context.tempDir(baseDir), outDir, cacheDir)
          _ <- Context
            .userInputFile(baseDir)
            .parent
            .fold(().pure[F])(Files[F].createDirectories)
          archives = addons.map(_.archive).distinctBy(_.url)
          _ <- logger.info(s"Extract ${archives.size} addons to $addonsDir")
          mkCtxs <- archives.traverse { archive =>
            for {
              _ <- logger.debug(s"Extracting $archive")
              addonDir <- archive.extractTo(urlReader, addonsDir)
              meta <- AddonMeta.findInDirectory(addonDir)
              mkCtx = (ref: AddonRef) =>
                Context(ref, meta, baseDir, addonDir, outDir, cacheDir)
            } yield archive.url -> mkCtx
          }
          ctxFactory = mkCtxs.toMap
          res = addons.map(ref => ctxFactory(ref.archive.url)(ref))
        } yield res

      private def runAddon(logger: Logger[F], env: Env)(
          ctx: Context
      ): F[AddonResult] =
        for {
          _ <- logger.info(s"Executing addon ${ctx.meta.nameAndVersion}")
          _ <- logger.trace("Storing user input into file")
          _ <- Stream
            .emit(ctx.addon.args)
            .through(fs2.text.utf8.encode)
            .through(Files[F].writeAll(ctx.userInputFile, Flags.Write))
            .compile
            .drain

          runner <- selectRunner(cfg, ctx.meta, ctx.addonDir)
          result <- runner.run(logger, env, ctx)
        } yield result
    }

  def selectRunner[F[_]: Async](
      cfg: AddonExecutorConfig,
      meta: AddonMeta,
      addonDir: Path
  ): F[AddonRunner[F]] =
    for {
      addonRunner <- meta.enabledTypes(Left(addonDir))
      // intersect on list retains order in first
      possibleRunner = cfg.runner
        .intersect(addonRunner)
        .map(AddonRunner.forType[F](cfg))
      runner = possibleRunner match {
        case Nil =>
          AddonRunner.failWith(
            s"No runner available for addon config ${meta.runner} and config ${cfg.runner}."
          )
        case list =>
          AddonRunner.firstSuccessful(list)
      }
    } yield runner
}
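A sketch (not from the diff) of wiring the executor together; the directory locations here are hypothetical placeholders.

import cats.effect.IO
import fs2.io.file.Path
import docspell.addons._
import docspell.common.UrlReader
import docspell.common.exec.Env
import docspell.logging.Logger

def runAll(
    cfg: AddonExecutorConfig,
    reader: UrlReader[IO],
    logger: Logger[IO],
    addons: List[AddonRef]
): IO[AddonExecutionResult] = {
  val in = InputEnv(
    addons,
    baseDir = Path("/tmp/addon-work"),          // hypothetical working dir
    outputDir = Path("/tmp/addon-work/output"), // where addons place result files
    cacheDir = Path("/tmp/addon-cache"),
    env = Env.empty
  )
  AddonExecutor[IO](cfg, reader).execute(logger, in)
}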
@@ -0,0 +1,45 @@ new file: AddonExecutorConfig.scala

/*
 * Copyright 2020 Eike K. & Contributors
 *
 * SPDX-License-Identifier: AGPL-3.0-or-later
 */

package docspell.addons

import docspell.addons.AddonExecutorConfig._
import docspell.common.Duration
import docspell.common.exec.{Args, SysCmd}

case class AddonExecutorConfig(
    runner: List[RunnerType],
    runTimeout: Duration,
    nspawn: NSpawn,
    nixRunner: NixConfig,
    dockerRunner: DockerConfig
)

object AddonExecutorConfig {

  case class NSpawn(
      enabled: Boolean,
      sudoBinary: String,
      nspawnBinary: String,
      containerWait: Duration
  ) {
    val nspawnVersion =
      SysCmd(nspawnBinary, Args.of("--version")).withTimeout(Duration.seconds(2))
  }

  case class NixConfig(
      nixBinary: String,
      buildTimeout: Duration
  )

  case class DockerConfig(
      dockerBinary: String,
      buildTimeout: Duration
  ) {
    def dockerBuild(imageName: String): SysCmd =
      SysCmd(dockerBinary, "build", "-t", imageName, ".").withTimeout(buildTimeout)
  }
}
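A sketch of a fully populated config; all binary names and timeouts here are illustrative values, not defaults from the diff.

import docspell.addons.{AddonExecutorConfig, RunnerType}
import docspell.addons.AddonExecutorConfig.{DockerConfig, NixConfig, NSpawn}
import docspell.common.Duration

val executorCfg = AddonExecutorConfig(
  runner = List(RunnerType.NixFlake, RunnerType.Docker, RunnerType.Trivial),
  runTimeout = Duration.seconds(300),
  nspawn = NSpawn(
    enabled = false,
    sudoBinary = "sudo",
    nspawnBinary = "systemd-nspawn",
    containerWait = Duration.seconds(1)
  ),
  nixRunner = NixConfig("nix", Duration.seconds(600)),
  dockerRunner = DockerConfig("docker", Duration.seconds(600))
)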
@@ -0,0 +1,28 @@ new file: AddonLoggerExtension.scala

/*
 * Copyright 2020 Eike K. & Contributors
 *
 * SPDX-License-Identifier: AGPL-3.0-or-later
 */

package docspell.addons

import docspell.logging.Logger

trait AddonLoggerExtension {

  implicit final class LoggerAddonOps[F[_]](self: Logger[F]) {
    private val addonName = "addon-name"
    private val addonVersion = "addon-version"

    def withAddon(r: AddonArchive): Logger[F] =
      self.capture(addonName, r.name).capture(addonVersion, r.version)

    def withAddon(r: Context): Logger[F] =
      withAddon(r.addon.archive)

    def withAddon(m: AddonMeta): Logger[F] =
      self.capture(addonName, m.meta.name).capture(addonVersion, m.meta.version)
  }
}

object AddonLoggerExtension extends AddonLoggerExtension
modules/addonlib/src/main/scala/docspell/addons/AddonMeta.scala (new file, 216 lines)
@@ -0,0 +1,216 @@

/*
 * Copyright 2020 Eike K. & Contributors
 *
 * SPDX-License-Identifier: AGPL-3.0-or-later
 */

package docspell.addons

import java.io.FileNotFoundException

import cats.data.OptionT
import cats.effect._
import cats.syntax.all._
import fs2.Stream
import fs2.io.file.{Files, Path}

import docspell.common.Glob
import docspell.files.Zip

import io.circe.generic.semiauto.{deriveDecoder, deriveEncoder}
import io.circe.yaml.{parser => YamlParser}
import io.circe.{Decoder, Encoder}
import io.circe.{parser => JsonParser}

case class AddonMeta(
    meta: AddonMeta.Meta,
    triggers: Option[Set[AddonTriggerType]],
    args: Option[List[String]],
    runner: Option[AddonMeta.Runner],
    options: Option[AddonMeta.Options]
) {

  def nameAndVersion: String =
    s"${meta.name}-${meta.version}"

  def parseResult: Boolean =
    options.exists(_.collectOutput)

  def ignoreResult: Boolean =
    !parseResult

  def isImpure: Boolean =
    options.exists(_.isImpure)

  def isPure: Boolean =
    options.forall(_.isPure)

  /** Returns a list of runner types that are possible to use for this addon. This is also
    * inspecting the archive to return defaults when the addon isn't declaring it in the
    * descriptor.
    */
  def enabledTypes[F[_]: Async](
      archive: Either[Path, Stream[F, Byte]]
  ): F[List[RunnerType]] =
    for {
      filesExists <- AddonArchive.dockerAndFlakeExists(archive)
      (dockerFileExists, flakeFileExists) = filesExists

      nixEnabled = runner.flatMap(_.nix).map(_.enable) match {
        case Some(flag) => flag
        case None       => flakeFileExists
      }

      dockerEnabled = runner.flatMap(_.docker).map(_.enable) match {
        case Some(flag) => flag
        case None       => dockerFileExists
      }

      trivialEnabled = runner.flatMap(_.trivial).exists(_.enable)

      result = RunnerType.all.filter(_.fold(nixEnabled, dockerEnabled, trivialEnabled))
    } yield result
}

object AddonMeta {

  def empty(name: String, version: String): AddonMeta =
    AddonMeta(Meta(name, version, None), None, None, None, None)

  case class Meta(name: String, version: String, description: Option[String])
  case class Runner(
      nix: Option[NixRunner],
      docker: Option[DockerRunner],
      trivial: Option[TrivialRunner]
  )
  case class NixRunner(enable: Boolean)
  case class DockerRunner(enable: Boolean, image: Option[String], build: Option[String])
  case class TrivialRunner(enable: Boolean, exec: String)
  case class Options(networking: Boolean, collectOutput: Boolean) {
    def isPure = !networking && collectOutput
    def isImpure = networking
    def isUseless = !networking && !collectOutput
    def isUseful = networking || collectOutput
  }

  object NixRunner {
    implicit val jsonEncoder: Encoder[NixRunner] =
      deriveEncoder
    implicit val jsonDecoder: Decoder[NixRunner] =
      deriveDecoder
  }

  object DockerRunner {
    implicit val jsonEncoder: Encoder[DockerRunner] =
      deriveEncoder
    implicit val jsonDecoder: Decoder[DockerRunner] =
      deriveDecoder
  }

  object TrivialRunner {
    implicit val jsonEncoder: Encoder[TrivialRunner] =
      deriveEncoder
    implicit val jsonDecoder: Decoder[TrivialRunner] =
      deriveDecoder
  }

  object Runner {
    implicit val jsonEncoder: Encoder[Runner] =
      deriveEncoder
    implicit val jsonDecoder: Decoder[Runner] =
      deriveDecoder
  }

  object Options {
    implicit val jsonEncoder: Encoder[Options] =
      deriveEncoder
    implicit val jsonDecoder: Decoder[Options] =
      deriveDecoder
  }

  object Meta {
    implicit val jsonEncoder: Encoder[Meta] =
      deriveEncoder
    implicit val jsonDecoder: Decoder[Meta] =
      deriveDecoder
  }

  implicit val jsonEncoder: Encoder[AddonMeta] =
    deriveEncoder

  implicit val jsonDecoder: Decoder[AddonMeta] =
    deriveDecoder

  def fromJsonString(str: String): Either[Throwable, AddonMeta] =
    JsonParser.decode[AddonMeta](str)

  def fromJsonBytes[F[_]: Sync](bytes: Stream[F, Byte]): F[AddonMeta] =
    bytes
      .through(fs2.text.utf8.decode)
      .compile
      .string
      .map(fromJsonString)
      .rethrow

  def fromYamlString(str: String): Either[Throwable, AddonMeta] =
    YamlParser.parse(str).flatMap(_.as[AddonMeta])

  def fromYamlBytes[F[_]: Sync](bytes: Stream[F, Byte]): F[AddonMeta] =
    bytes
      .through(fs2.text.utf8.decode)
      .compile
      .string
      .map(fromYamlString)
      .rethrow

  def findInDirectory[F[_]: Sync: Files](dir: Path): F[AddonMeta] = {
    val logger = docspell.logging.getLogger[F]
    val jsonFile = dir / "docspell-addon.json"
    val yamlFile = dir / "docspell-addon.yaml"
    val yamlFile2 = dir / "docspell-addon.yml"

    OptionT
      .liftF(Files[F].exists(jsonFile))
      .flatTap(OptionT.whenF(_)(logger.debug(s"Reading json addon file $jsonFile")))
      .flatMap(OptionT.whenF(_)(fromJsonBytes(Files[F].readAll(jsonFile))))
      .orElse(
        OptionT
          .liftF(Files[F].exists(yamlFile))
          .flatTap(OptionT.whenF(_)(logger.debug(s"Reading yaml addon file $yamlFile")))
          .flatMap(OptionT.whenF(_)(fromYamlBytes(Files[F].readAll(yamlFile))))
      )
      .orElse(
        OptionT
          .liftF(Files[F].exists(yamlFile2))
          .flatTap(OptionT.whenF(_)(logger.debug(s"Reading yaml addon file $yamlFile2")))
          .flatMap(OptionT.whenF(_)(fromYamlBytes(Files[F].readAll(yamlFile2))))
      )
      .getOrElseF(
        Sync[F].raiseError(
          new FileNotFoundException(s"No docspell-addon.{yaml|json} file found in $dir!")
        )
      )
  }

  def findInZip[F[_]: Async](zipFile: Stream[F, Byte]): F[AddonMeta] = {
    val fail: F[AddonMeta] = Async[F].raiseError(
      new FileNotFoundException(
        s"No docspell-addon.{yaml|json} file found in zip!"
      )
    )
    zipFile
      .through(Zip.unzip(8192, Glob("**/docspell-addon.*")))
      .filter(bin => !bin.name.endsWith("/"))
      .flatMap { bin =>
        if (bin.extensionIn(Set("json"))) Stream.eval(AddonMeta.fromJsonBytes(bin.data))
        else if (bin.extensionIn(Set("yaml", "yml")))
          Stream.eval(AddonMeta.fromYamlBytes(bin.data))
        else Stream.empty
      }
      .take(1)
      .compile
      .last
      .flatMap(_.map(Sync[F].pure).getOrElse(fail))
  }
}
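For illustration, a hypothetical docspell-addon.yaml parsed through the codecs above; the field names follow the case classes (meta, triggers, args, runner, options), and the values are made up.

val descriptor =
  """meta:
    |  name: "hello-addon"
    |  version: "0.1.0"
    |  description: "Say hello"
    |triggers:
    |  - existing-item
    |runner:
    |  trivial:
    |    enable: true
    |    exec: "hello.sh"
    |options:
    |  networking: false
    |  collectOutput: true
    |""".stripMargin

val parsed = AddonMeta.fromYamlString(descriptor) // Right(AddonMeta(...))
// parsed.map(_.isPure) == Right(true): no networking and output is collected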
@@ -0,0 +1,9 @@ new file: AddonRef.scala

/*
 * Copyright 2020 Eike K. & Contributors
 *
 * SPDX-License-Identifier: AGPL-3.0-or-later
 */

package docspell.addons

case class AddonRef(archive: AddonArchive, args: String)
@@ -0,0 +1,114 @@ new file: AddonResult.scala

/*
 * Copyright 2020 Eike K. & Contributors
 *
 * SPDX-License-Identifier: AGPL-3.0-or-later
 */

package docspell.addons

import cats.Monoid

import docspell.addons.out.AddonOutput

import io.circe.generic.extras.Configuration
import io.circe.generic.extras.semiauto.{deriveConfiguredDecoder, deriveConfiguredEncoder}
import io.circe.generic.semiauto.{deriveDecoder, deriveEncoder}
import io.circe.{Codec, Decoder, Encoder}

sealed trait AddonResult {
  def toEither: Either[Throwable, AddonOutput]

  def isSuccess: Boolean = toEither.isRight
  def isFailure: Boolean = !isSuccess

  def cast: AddonResult = this
}

object AddonResult {

  /** The addon was run successfully, but decoding its stdout failed. */
  case class DecodingError(message: String) extends AddonResult {
    def toEither = Left(new IllegalStateException(message))
  }
  object DecodingError {
    implicit val jsonEncoder: Encoder[DecodingError] = deriveEncoder
    implicit val jsonDecoder: Decoder[DecodingError] = deriveDecoder
  }

  def decodingError(message: String): AddonResult =
    DecodingError(message)

  def decodingError(ex: Throwable): AddonResult =
    DecodingError(ex.getMessage)

  /** Running the addon resulted in an invalid return code (!= 0). */
  case class ExecutionError(rc: Int) extends AddonResult {
    def toEither = Left(new IllegalStateException(s"Exit code: $rc"))
  }

  object ExecutionError {
    implicit val jsonEncoder: Encoder[ExecutionError] = deriveEncoder
    implicit val jsonDecoder: Decoder[ExecutionError] = deriveDecoder
  }

  def executionError(rc: Int): AddonResult =
    ExecutionError(rc)

  /** The execution of the addon failed with an exception. */
  case class ExecutionFailed(error: Throwable) extends AddonResult {
    def toEither = Left(error)
  }

  object ExecutionFailed {
    implicit val throwableCodec: Codec[Throwable] =
      Codec.from(
        Decoder[String].emap(str => Right(ErrorMessageThrowable(str))),
        Encoder[String].contramap(_.getMessage)
      )

    implicit val jsonEncoder: Encoder[ExecutionFailed] = deriveEncoder
    implicit val jsonDecoder: Decoder[ExecutionFailed] = deriveDecoder

    private class ErrorMessageThrowable(msg: String) extends RuntimeException(msg) {
      override def fillInStackTrace() = this
    }
    private object ErrorMessageThrowable {
      def apply(str: String): Throwable = new ErrorMessageThrowable(str)
    }
  }

  def executionFailed(error: Throwable): AddonResult =
    ExecutionFailed(error)

  /** The addon was run successfully and its output was decoded (if any). */
  case class Success(output: AddonOutput) extends AddonResult {
    def toEither = Right(output)
  }

  object Success {
    implicit val jsonEncoder: Encoder[Success] = deriveEncoder
    implicit val jsonDecoder: Decoder[Success] = deriveDecoder
  }

  def success(output: AddonOutput): AddonResult =
    Success(output)

  val empty: AddonResult = Success(AddonOutput.empty)

  def combine(a: AddonResult, b: AddonResult): AddonResult =
    (a, b) match {
      case (Success(o1), Success(o2)) => Success(AddonOutput.combine(o1, o2))
      case (Success(_), e)            => e
      case (e, Success(_))            => e
      case _                          => a
    }

  implicit val deriveConfig: Configuration =
    Configuration.default.withDiscriminator("result").withKebabCaseConstructorNames

  implicit val jsonDecoder: Decoder[AddonResult] = deriveConfiguredDecoder
  implicit val jsonEncoder: Encoder[AddonResult] = deriveConfiguredEncoder

  implicit val addonResultMonoid: Monoid[AddonResult] =
    Monoid.instance(empty, combine)
}
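A short sketch of the combine semantics above: successes merge their outputs, while any failure dominates (and the first failure is kept when both sides fail).

import cats.syntax.all._
import docspell.addons.AddonResult
import docspell.addons.out.AddonOutput

val ok   = AddonResult.success(AddonOutput.empty)
val boom = AddonResult.executionError(1)
List(ok, boom, ok).combineAll.isFailure // true: the error is kept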
@@ -0,0 +1,110 @@ new file: AddonRunner.scala

/*
 * Copyright 2020 Eike K. & Contributors
 *
 * SPDX-License-Identifier: AGPL-3.0-or-later
 */

package docspell.addons

import cats.Applicative
import cats.effect._
import cats.syntax.all._
import fs2.Stream

import docspell.addons.runner._
import docspell.common.exec.Env
import docspell.logging.Logger

trait AddonRunner[F[_]] {
  def runnerType: List[RunnerType]

  def run(
      logger: Logger[F],
      env: Env,
      ctx: Context
  ): F[AddonResult]
}

object AddonRunner {
  def forType[F[_]: Async](cfg: AddonExecutorConfig)(rt: RunnerType) =
    rt match {
      case RunnerType.NixFlake => NixFlakeRunner[F](cfg)
      case RunnerType.Docker   => DockerRunner[F](cfg)
      case RunnerType.Trivial  => TrivialRunner[F](cfg)
    }

  def failWith[F[_]](errorMsg: String)(implicit F: Applicative[F]): AddonRunner[F] =
    pure(AddonResult.executionFailed(new Exception(errorMsg)))

  def pure[F[_]: Applicative](result: AddonResult): AddonRunner[F] =
    new AddonRunner[F] {
      val runnerType = Nil

      def run(logger: Logger[F], env: Env, ctx: Context) =
        Applicative[F].pure(result)
    }

  def firstSuccessful[F[_]: Sync](runners: List[AddonRunner[F]]): AddonRunner[F] =
    runners match {
      case Nil      => failWith("No runner available!")
      case a :: Nil => a
      case _ =>
        new AddonRunner[F] {
          val runnerType = runners.flatMap(_.runnerType).distinct

          def run(logger: Logger[F], env: Env, ctx: Context) =
            Stream
              .emits(runners)
              .evalTap(r =>
                logger.info(
                  s"Attempt to run addon ${ctx.meta.nameAndVersion} with runner ${r.runnerType}"
                )
              )
              .evalMap(_.run(logger, env, ctx))
              .flatMap {
                case r @ AddonResult.Success(_) => Stream.emit(r.cast.some)
                case r @ AddonResult.ExecutionFailed(ex) =>
                  if (ctx.meta.isPure) {
                    logger.stream
                      .warn(ex)(s"Addon runner failed, try next.")
                      .as(r.cast.some)
                  } else {
                    logger.stream.warn(ex)(s"Addon runner failed!").as(None)
                  }
                case r @ AddonResult.ExecutionError(rc) =>
                  if (ctx.meta.isPure) {
                    logger.stream
                      .warn(s"Addon runner returned non-zero: $rc. Try next.")
                      .as(r.cast.some)
                  } else {
                    logger.stream.warn(s"Addon runner returned non-zero: $rc!").as(None)
                  }
                case AddonResult.DecodingError(message) =>
                  // Don't retry as it is very unlikely that the output differs using another runner
                  // This is most likely a bug in the addon
                  logger.stream
                    .warn(
                      s"Error decoding the output of the addon ${ctx.meta.nameAndVersion}: $message. Stopping here. This is likely a bug in the addon."
                    )
                    .as(None)
              }
              .unNoneTerminate
              .takeThrough(_.isFailure)
              .compile
              .last
              .flatMap {
                case Some(r) => r.pure[F]
                case None =>
                  AddonResult
                    .executionFailed(new NoSuchElementException("No runner left :("))
                    .pure[F]
              }
        }
    }

  def firstSuccessful[F[_]: Sync](
      runner: AddonRunner[F],
      runners: AddonRunner[F]*
  ): AddonRunner[F] =
    firstSuccessful(runner :: runners.toList)
}
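A sketch (not from the diff) of composing runners; note that, per the logic above, a failed attempt only falls through to the next runner when the addon is declared pure.

import cats.effect.IO
import docspell.addons.{AddonResult, AddonRunner}

val composite: AddonRunner[IO] =
  AddonRunner.firstSuccessful(
    AddonRunner.failWith[IO]("first runner is broken"), // always fails
    AddonRunner.pure[IO](AddonResult.empty)             // fallback that succeeds
  )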
@@ -0,0 +1,55 @@ new file: AddonTriggerType.scala

/*
 * Copyright 2020 Eike K. & Contributors
 *
 * SPDX-License-Identifier: AGPL-3.0-or-later
 */

package docspell.addons

import cats.data.NonEmptyList

import io.circe.{Decoder, Encoder}

sealed trait AddonTriggerType {
  def name: String
}

object AddonTriggerType {

  /** The final step when processing an item. */
  case object FinalProcessItem extends AddonTriggerType {
    val name = "final-process-item"
  }

  /** The final step when reprocessing an item. */
  case object FinalReprocessItem extends AddonTriggerType {
    val name = "final-reprocess-item"
  }

  /** Running periodically based on a schedule. */
  case object Scheduled extends AddonTriggerType {
    val name = "scheduled"
  }

  /** Running (manually) on some existing item. */
  case object ExistingItem extends AddonTriggerType {
    val name = "existing-item"
  }

  val all: NonEmptyList[AddonTriggerType] =
    NonEmptyList.of(FinalProcessItem, FinalReprocessItem, Scheduled, ExistingItem)

  def fromString(str: String): Either[String, AddonTriggerType] =
    all
      .find(e => e.name.equalsIgnoreCase(str))
      .toRight(s"Invalid addon trigger type: $str")

  def unsafeFromString(str: String): AddonTriggerType =
    fromString(str).fold(sys.error, identity)

  implicit val jsonEncoder: Encoder[AddonTriggerType] =
    Encoder.encodeString.contramap(_.name)

  implicit val jsonDecoder: Decoder[AddonTriggerType] =
    Decoder.decodeString.emap(fromString)
}
@@ -0,0 +1,72 @@ new file: Context.scala

/*
 * Copyright 2020 Eike K. & Contributors
 *
 * SPDX-License-Identifier: AGPL-3.0-or-later
 */

package docspell.addons

import fs2.io.file.Path

import docspell.common._
import docspell.common.exec.{Args, Env, SysCmd}

/** The context a list of addons is executed in.
  *
  * Each addon has its own `addonDir`, but all share the same `baseDir` in one run.
  */
case class Context(
    addon: AddonRef,
    meta: AddonMeta,
    baseDir: Path,
    addonDir: Path,
    outputDir: Path,
    cacheDir: Path
) {
  def userInputFile = Context.userInputFile(baseDir)
  def tempDir = Context.tempDir(baseDir)

  private[addons] def addonCommand(
      binary: String,
      timeout: Duration,
      relativeToBase: Boolean,
      outputDir: Option[String],
      cacheDir: Option[String]
  ): SysCmd = {
    val execBin = Option
      .when(relativeToBase)(binary)
      .getOrElse((baseDir / binary).toString)

    val input = Option
      .when(relativeToBase)(baseDir.relativize(userInputFile))
      .getOrElse(userInputFile)

    val allArgs =
      Args(meta.args.getOrElse(Nil)).append(input)
    val envAddonDir = Option
      .when(relativeToBase)(baseDir.relativize(addonDir))
      .getOrElse(addonDir)
    val envTmpDir = Option
      .when(relativeToBase)(baseDir.relativize(tempDir))
      .getOrElse(tempDir)
    val outDir = outputDir.getOrElse(this.outputDir.toString)
    val cache = cacheDir.getOrElse(this.cacheDir.toString)
    val moreEnv =
      Env.of(
        "ADDON_DIR" -> envAddonDir.toString,
        "TMPDIR" -> envTmpDir.toString,
        "TMP_DIR" -> envTmpDir.toString,
        "OUTPUT_DIR" -> outDir,
        "CACHE_DIR" -> cache
      )

    SysCmd(execBin, allArgs).withTimeout(timeout).addEnv(moreEnv)
  }
}

object Context {
  def userInputFile(base: Path): Path =
    base / "arguments" / "user-input"
  def tempDir(base: Path): Path =
    base / "temp"
}
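The fixed layout implied by these helpers, shown for a hypothetical base directory:

import fs2.io.file.Path
import docspell.addons.Context

val base = Path("/tmp/addon-run")
Context.userInputFile(base) // /tmp/addon-run/arguments/user-input
Context.tempDir(base)       // /tmp/addon-run/temp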
@@ -0,0 +1,74 @@ new file: Directory.scala

/*
 * Copyright 2020 Eike K. & Contributors
 *
 * SPDX-License-Identifier: AGPL-3.0-or-later
 */

package docspell.addons

import cats.effect._
import cats.syntax.all._
import cats.{Applicative, Monad}
import fs2.io.file.{Files, Path, PosixPermissions}

object Directory {

  def create[F[_]: Files: Applicative](dir: Path): F[Path] =
    Files[F]
      .createDirectories(dir, PosixPermissions.fromOctal("777"))
      .as(dir)

  def createAll[F[_]: Files: Applicative](dir: Path, dirs: Path*): F[Unit] =
    (dir :: dirs.toList).traverse_(Files[F].createDirectories(_))

  def nonEmpty[F[_]: Files: Sync](dir: Path): F[Boolean] =
    List(
      Files[F].isDirectory(dir),
      Files[F].list(dir).take(1).compile.last.map(_.isDefined)
    ).sequence.map(_.forall(identity))

  def isEmpty[F[_]: Files: Sync](dir: Path): F[Boolean] =
    nonEmpty(dir).map(b => !b)

  def temp[F[_]: Files](parent: Path, prefix: String): Resource[F, Path] =
    for {
      _ <- Resource.eval(Files[F].createDirectories(parent))
      d <- mkTemp(parent, prefix)
    } yield d

  def temp2[F[_]: Files](
      parent: Path,
      prefix1: String,
      prefix2: String
  ): Resource[F, (Path, Path)] =
    for {
      _ <- Resource.eval(Files[F].createDirectories(parent))
      a <- mkTemp(parent, prefix1)
      b <- mkTemp(parent, prefix2)
    } yield (a, b)

  def createTemp[F[_]: Files: Monad](
      parent: Path,
      prefix: String
  ): F[Path] =
    for {
      _ <- Files[F].createDirectories(parent)
      d <- mkTemp_(parent, prefix)
    } yield d

  private def mkTemp[F[_]: Files](parent: Path, prefix: String): Resource[F, Path] =
    Files[F]
      .tempDirectory(
        parent.some,
        prefix,
        PosixPermissions.fromOctal("777")
      )

  private def mkTemp_[F[_]: Files](parent: Path, prefix: String): F[Path] =
    Files[F]
      .createTempDirectory(
        parent.some,
        prefix,
        PosixPermissions.fromOctal("777")
      )
}
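A sketch of the resource-based variant: the parent directory is created first, and the scratch directory is removed when the resource is released.

import cats.effect.IO
import fs2.io.file.Path
import docspell.addons.Directory

val prog: IO[Unit] =
  Directory.temp[IO](Path("/tmp/docspell"), "addon-").use { dir =>
    IO.println(s"working in $dir") // dir is deleted after use
  }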
@@ -0,0 +1,32 @@ new file: InputEnv.scala

/*
 * Copyright 2020 Eike K. & Contributors
 *
 * SPDX-License-Identifier: AGPL-3.0-or-later
 */

package docspell.addons

import cats.effect.Resource
import fs2.io.file.{Files, Path}

import docspell.common.exec.Env

case class InputEnv(
    addons: List[AddonRef],
    baseDir: Path,
    outputDir: Path,
    cacheDir: Path,
    env: Env
) {
  def addEnv(key: String, value: String): InputEnv =
    copy(env = env.add(key, value))

  def addEnv(vp: (String, String)*): InputEnv =
    copy(env = env.addAll(vp.toMap))

  def addEnv(vm: Map[String, String]): InputEnv =
    copy(env = env ++ Env(vm))

  def withTempBase[F[_]: Files]: Resource[F, InputEnv] =
    Directory.temp(baseDir, "addon-").map(path => copy(baseDir = path))
}
@@ -0,0 +1,43 @@ new file: Middleware.scala

/*
 * Copyright 2020 Eike K. & Contributors
 *
 * SPDX-License-Identifier: AGPL-3.0-or-later
 */

package docspell.addons

import cats.Monad
import cats.data.Kleisli
import cats.effect.kernel.Sync
import cats.syntax.all._
import fs2.io.file.Files

trait Middleware[F[_]] extends (AddonExec[F] => AddonExec[F]) { self =>

  def >>(next: Middleware[F]): Middleware[F] =
    Middleware(self.andThen(next))
}

object Middleware {
  def apply[F[_]](f: AddonExec[F] => AddonExec[F]): Middleware[F] =
    a => f(a)

  def identity[F[_]]: Middleware[F] = Middleware(scala.Predef.identity)

  /** Uses a temporary base dir that is removed after execution. Use this as the last
    * layer!
    */
  def ephemeralRun[F[_]: Files: Sync]: Middleware[F] =
    Middleware(a => Kleisli(_.withTempBase.use(a.run)))

  /** Prepare running an addon */
  def prepare[F[_]: Monad](
      prep: Kleisli[F, InputEnv, InputEnv]
  ): Middleware[F] =
    Middleware(a => Kleisli(in => prep.run(in).flatMap(a.run)))

  def postProcess[F[_]: Monad](
      post: Kleisli[F, AddonExecutionResult, Unit]
  ): Middleware[F] =
    Middleware(_.flatMapF(r => post.map(_ => r).run(r)))
}
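A composition sketch (not from the diff): inject an environment variable before each run and use a throw-away base directory. Here `exec` stands for any AddonExec[IO], e.g. the result of executor.execute(logger); the variable name is made up.

import cats.data.Kleisli
import cats.effect.IO
import docspell.addons.{AddonExec, Middleware}

def withDefaults(exec: AddonExec[IO]): AddonExec[IO] = {
  val mw =
    Middleware.prepare[IO](Kleisli(in => IO.pure(in.addEnv("ADDON_RUN" -> "1")))) >>
      Middleware.ephemeralRun[IO] // last layer, per the note above
  mw(exec)
}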
@@ -0,0 +1,69 @@ new file: RunnerType.scala

/*
 * Copyright 2020 Eike K. & Contributors
 *
 * SPDX-License-Identifier: AGPL-3.0-or-later
 */

package docspell.addons

import cats.data.NonEmptyList
import cats.syntax.all._

import io.circe.{Decoder, Encoder}

sealed trait RunnerType {
  def name: String

  def fold[A](
      nixFlake: => A,
      docker: => A,
      trivial: => A
  ): A
}
object RunnerType {
  case object NixFlake extends RunnerType {
    val name = "nix-flake"

    def fold[A](
        nixFlake: => A,
        docker: => A,
        trivial: => A
    ): A = nixFlake
  }
  case object Docker extends RunnerType {
    val name = "docker"

    def fold[A](
        nixFlake: => A,
        docker: => A,
        trivial: => A
    ): A = docker
  }
  case object Trivial extends RunnerType {
    val name = "trivial"

    def fold[A](
        nixFlake: => A,
        docker: => A,
        trivial: => A
    ): A = trivial
  }

  val all: NonEmptyList[RunnerType] =
    NonEmptyList.of(NixFlake, Docker, Trivial)

  def fromString(str: String): Either[String, RunnerType] =
    all.find(_.name.equalsIgnoreCase(str)).toRight(s"Invalid runner value: $str")

  def unsafeFromString(str: String): RunnerType =
    fromString(str).fold(sys.error, identity)

  def fromSeparatedString(str: String): Either[String, List[RunnerType]] =
    str.split("[\\s,]+").toList.map(_.trim).traverse(fromString)

  implicit val jsonDecoder: Decoder[RunnerType] =
    Decoder[String].emap(RunnerType.fromString)

  implicit val jsonEncoder: Encoder[RunnerType] =
    Encoder[String].contramap(_.name)
}
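The comma or whitespace separated form accepted from configuration, for example:

import docspell.addons.RunnerType

RunnerType.fromSeparatedString("nix-flake, docker")
// Right(List(NixFlake, Docker))
RunnerType.fromSeparatedString("podman")
// Left("Invalid runner value: podman")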
@@ -0,0 +1,44 @@ new file: AddonOutput.scala

/*
 * Copyright 2020 Eike K. & Contributors
 *
 * SPDX-License-Identifier: AGPL-3.0-or-later
 */

package docspell.addons.out

import cats.kernel.Monoid

import docspell.common.bc.BackendCommand

import io.circe.generic.extras.Configuration
import io.circe.generic.extras.semiauto.{deriveConfiguredDecoder, deriveConfiguredEncoder}
import io.circe.{Decoder, Encoder}

/** Decoded stdout result from executing an addon. */
case class AddonOutput(
    commands: List[BackendCommand] = Nil,
    files: List[ItemFile] = Nil,
    newItems: List[NewItem] = Nil
)

object AddonOutput {
  val empty: AddonOutput = AddonOutput()

  def combine(a: AddonOutput, b: AddonOutput): AddonOutput =
    AddonOutput(a.commands ++ b.commands, a.files ++ b.files)

  implicit val addonResultMonoid: Monoid[AddonOutput] =
    Monoid.instance(empty, combine)

  implicit val jsonConfig: Configuration =
    Configuration.default.withDefaults

  implicit val jsonDecoder: Decoder[AddonOutput] = deriveConfiguredDecoder
  implicit val jsonEncoder: Encoder[AddonOutput] = deriveConfiguredEncoder

  def fromString(str: String): Either[Throwable, AddonOutput] =
    io.circe.parser.decode[AddonOutput](str)

  def unsafeFromString(str: String): AddonOutput =
    fromString(str).fold(throw _, identity)
}
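A sketch of the stdout JSON an addon could emit (made-up ids and file names); missing fields fall back to their defaults because of `Configuration.default.withDefaults`. ItemFile is defined below.

val out = AddonOutput.fromString(
  """{ "files": [ { "itemId": "item1", "textFiles": { "attach1": "text.txt" } } ] }"""
)
// Right(AddonOutput(commands = Nil, files = List(ItemFile(...)), newItems = Nil))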
@@ -0,0 +1,102 @@ new file: ItemFile.scala

/*
 * Copyright 2020 Eike K. & Contributors
 *
 * SPDX-License-Identifier: AGPL-3.0-or-later
 */

package docspell.addons.out

import cats.data.OptionT
import cats.effect._
import cats.syntax.all._
import fs2.io.file.{Files, Path}

import docspell.common._
import docspell.files.FileSupport._
import docspell.logging.Logger

import io.circe.generic.extras.Configuration
import io.circe.generic.extras.semiauto.{deriveConfiguredDecoder, deriveConfiguredEncoder}
import io.circe.{Decoder, Encoder}

/** Addons can produce files in their output directory. These can be named here in order
  * to do something with them.
  *
  *   - textFiles will replace the extracted text with the contents of the file
  *   - pdfFiles will add/replace the converted pdf with the given file
  *   - previewImages will add/replace preview images
  *   - newFiles will be added as new attachments to the item
  *
  * Files must be referenced by attachment id.
  */
final case class ItemFile(
    itemId: Ident,
    textFiles: Map[String, String] = Map.empty,
    pdfFiles: Map[String, String] = Map.empty,
    previewImages: Map[String, String] = Map.empty,
    newFiles: List[NewFile] = Nil
) {
  def isEmpty: Boolean =
    textFiles.isEmpty && pdfFiles.isEmpty && previewImages.isEmpty

  def nonEmpty: Boolean = !isEmpty

  def resolveTextFiles[F[_]: Files: Sync](
      logger: Logger[F],
      outputDir: Path
  ): F[List[(String, Path)]] =
    resolveFiles(logger, outputDir, MimeType.text("*"), textFiles)

  def resolvePdfFiles[F[_]: Files: Sync](
      logger: Logger[F],
      outputDir: Path
  ): F[List[(String, Path)]] =
    resolveFiles(logger, outputDir, MimeType.pdf, pdfFiles)

  def resolvePreviewFiles[F[_]: Files: Sync](
      logger: Logger[F],
      outputDir: Path
  ): F[List[(String, Path)]] =
    resolveFiles(logger, outputDir, MimeType.image("*"), previewImages)

  def resolveNewFiles[F[_]: Files: Sync](
      logger: Logger[F],
      outputDir: Path
  ): F[List[(NewFile, Path)]] =
    newFiles.traverseFilter(nf =>
      nf.resolveFile(logger, outputDir).map(_.map(p => (nf, p)))
    )

  private def resolveFiles[F[_]: Files: Sync](
      logger: Logger[F],
      outputDir: Path,
      mime: MimeType,
      files: Map[String, String]
  ): F[List[(String, Path)]] = {
    val allFiles =
      files.toList.map(t => t._1 -> outputDir / t._2)

    allFiles.traverseFilter { case (key, file) =>
      OptionT(file.detectMime)
        .flatMapF(fileType =>
          if (mime.matches(fileType)) (key -> file).some.pure[F]
          else
            logger
              .warn(
                s"File $file provided as ${mime.asString} file, but was recognized as ${fileType.asString}. Ignoring it."
              )
              .as(None: Option[(String, Path)])
        )
        .value
    }
  }
}

object ItemFile {

  implicit val jsonConfig: Configuration =
    Configuration.default.withDefaults

  implicit val jsonEncoder: Encoder[ItemFile] = deriveConfiguredEncoder
  implicit val jsonDecoder: Decoder[ItemFile] = deriveConfiguredDecoder
}
@@ -0,0 +1,77 @@ new file: NewFile.scala

/*
 * Copyright 2020 Eike K. & Contributors
 *
 * SPDX-License-Identifier: AGPL-3.0-or-later
 */

package docspell.addons.out

import cats.effect.Sync
import cats.syntax.all._
import fs2.io.file.{Files, Path}

import docspell.addons.out.NewFile.Meta
import docspell.common.ProcessItemArgs.ProcessMeta
import docspell.common.{Ident, Language}
import docspell.logging.Logger

import io.circe.Codec
import io.circe.generic.extras.Configuration
import io.circe.generic.extras.semiauto.deriveConfiguredCodec
import io.circe.generic.semiauto.deriveCodec

case class NewFile(metadata: Meta = Meta.empty, file: String) {

  def resolveFile[F[_]: Files: Sync](
      logger: Logger[F],
      outputDir: Path
  ): F[Option[Path]] = {
    val target = outputDir / file
    Files[F]
      .exists(target)
      .flatMap(flag =>
        if (flag) target.some.pure[F]
        else logger.warn(s"File not found: $file").as(Option.empty)
      )
  }
}

object NewFile {

  case class Meta(
      language: Option[Language],
      skipDuplicate: Option[Boolean],
      attachmentsOnly: Option[Boolean]
  ) {

    def toProcessMeta(
        cid: Ident,
        itemId: Ident,
        collLang: Option[Language],
        sourceAbbrev: String
    ): ProcessMeta =
      ProcessMeta(
        collective = cid,
        itemId = Some(itemId),
        language = language.orElse(collLang).getOrElse(Language.English),
        direction = None,
        sourceAbbrev = sourceAbbrev,
        folderId = None,
        validFileTypes = Seq.empty,
        skipDuplicate = skipDuplicate.getOrElse(true),
        fileFilter = None,
        tags = None,
        reprocess = false,
        attachmentsOnly = attachmentsOnly
      )
  }

  object Meta {
    val empty = Meta(None, None, None)
    implicit val jsonCodec: Codec[Meta] = deriveCodec
  }

  implicit val jsonConfig: Configuration = Configuration.default.withDefaults

  implicit val jsonCodec: Codec[NewFile] = deriveConfiguredCodec
}
@@ -0,0 +1,92 @@ new file: NewItem.scala

/*
 * Copyright 2020 Eike K. & Contributors
 *
 * SPDX-License-Identifier: AGPL-3.0-or-later
 */

package docspell.addons.out

import cats.Monad
import cats.syntax.all._
import fs2.io.file.{Files, Path}

import docspell.addons.out.NewItem.Meta
import docspell.common._
import docspell.logging.Logger

import io.circe.generic.semiauto.{deriveDecoder, deriveEncoder}
import io.circe.{Decoder, Encoder}

case class NewItem(metadata: Option[Meta], files: List[String]) {

  def toProcessMeta(
      cid: Ident,
      collLang: Option[Language],
      sourceAbbrev: String
  ): ProcessItemArgs.ProcessMeta =
    metadata
      .getOrElse(Meta(None, None, None, None, None, None, None))
      .toProcessArgs(cid, collLang, sourceAbbrev)

  def resolveFiles[F[_]: Files: Monad](
      logger: Logger[F],
      outputDir: Path
  ): F[List[Path]] = {
    val allFiles =
      files.map(name => outputDir / name)

    allFiles.traverseFilter { file =>
      Files[F]
        .exists(file)
        .flatMap {
          case true => file.some.pure[F]
          case false =>
            logger
              .warn(s"File $file doesn't exist. Ignoring it.")
              .as(None)
        }
    }
  }
}

object NewItem {

  case class Meta(
      language: Option[Language],
      direction: Option[Direction],
      folderId: Option[Ident],
      source: Option[String],
      skipDuplicate: Option[Boolean],
      tags: Option[List[String]],
      attachmentsOnly: Option[Boolean]
  ) {

    def toProcessArgs(
        cid: Ident,
        collLang: Option[Language],
        sourceAbbrev: String
    ): ProcessItemArgs.ProcessMeta =
      ProcessItemArgs.ProcessMeta(
        collective = cid,
        itemId = None,
        language = language.orElse(collLang).getOrElse(Language.English),
        direction = direction,
        sourceAbbrev = source.getOrElse(sourceAbbrev),
        folderId = folderId,
        validFileTypes = Seq.empty,
        skipDuplicate = skipDuplicate.getOrElse(true),
        fileFilter = None,
        tags = tags,
        reprocess = false,
        attachmentsOnly = attachmentsOnly
      )
  }

  object Meta {
    implicit val jsonEncoder: Encoder[Meta] = deriveEncoder
    implicit val jsonDecoder: Decoder[Meta] = deriveDecoder
  }

  implicit val jsonDecoder: Decoder[NewItem] = deriveDecoder
  implicit val jsonEncoder: Encoder[NewItem] = deriveEncoder
}
@@ -0,0 +1,15 @@ new file: package.scala

/*
 * Copyright 2020 Eike K. & Contributors
 *
 * SPDX-License-Identifier: AGPL-3.0-or-later
 */

package docspell

import cats.data.Kleisli

package object addons {

  type AddonExec[F[_]] = Kleisli[F, InputEnv, AddonExecutionResult]

}
@@ -0,0 +1,43 @@
/*
 * Copyright 2020 Eike K. & Contributors
 *
 * SPDX-License-Identifier: AGPL-3.0-or-later
 */

package docspell.addons.runner

import cats.Applicative
import cats.effect.{Ref, Sync}
import cats.syntax.all._
import fs2.Pipe

trait CollectOut[F[_]] {

  def get: F[String]

  def append: Pipe[F, String, String]
}

object CollectOut {

  def none[F[_]: Applicative]: CollectOut[F] =
    new CollectOut[F] {
      def get = "".pure[F]
      def append = identity
    }

  def buffer[F[_]: Sync]: F[CollectOut[F]] =
    Ref
      .of[F, Vector[String]](Vector.empty)
      .map(buffer =>
        new CollectOut[F] {
          override def get =
            buffer.get.map(_.mkString("\n").trim)

          override def append =
            _.evalTap(line =>
              if (line.trim.nonEmpty) buffer.update(_.appended(line)) else ().pure[F]
            )
        }
      )
}
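Note: a minimal usage sketch of the buffered variant, not part of the commit, assuming cats-effect 3 and fs2: append passes lines through unchanged while remembering the non-blank ones, and get joins what was seen.

import cats.effect.{IO, IOApp}
import fs2.Stream

object CollectOutDemo extends IOApp.Simple {
  def run: IO[Unit] =
    for {
      out <- CollectOut.buffer[IO]
      // blank lines flow through the pipe but are not buffered
      _ <- Stream("first", "  ", "second").through(out.append).compile.drain
      s <- out.get // "first\nsecond"
      _ <- IO.println(s)
    } yield ()
}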
@@ -0,0 +1,82 @@
/*
 * Copyright 2020 Eike K. & Contributors
 *
 * SPDX-License-Identifier: AGPL-3.0-or-later
 */

package docspell.addons.runner

import fs2.io.file.Path

import docspell.common.Duration
import docspell.common.exec.{Args, Env, SysCmd}

/** Builder for a docker system command. */
case class DockerBuilder(
    dockerBinary: String,
    subCmd: String,
    timeout: Duration,
    containerName: Option[String] = None,
    env: Env = Env.empty,
    mounts: Args = Args.empty,
    network: Option[String] = Some("host"),
    workingDir: Option[String] = None,
    imageName: Option[String] = None,
    cntCmd: Args = Args.empty
) {
  def containerCmd(args: Args): DockerBuilder =
    copy(cntCmd = args)
  def containerCmd(args: Seq[String]): DockerBuilder =
    copy(cntCmd = Args(args))

  def imageName(name: String): DockerBuilder =
    copy(imageName = Some(name))

  def workDirectory(dir: String): DockerBuilder =
    copy(workingDir = Some(dir))

  def withDockerBinary(bin: String): DockerBuilder =
    copy(dockerBinary = bin)

  def withSubCmd(cmd: String): DockerBuilder =
    copy(subCmd = cmd)

  def withEnv(key: String, value: String): DockerBuilder =
    copy(env = env.add(key, value))

  def withEnv(moreEnv: Env): DockerBuilder =
    copy(env = env ++ moreEnv)

  def privateNetwork(flag: Boolean): DockerBuilder =
    if (flag) copy(network = Some("none"))
    else copy(network = Some("host"))

  def mount(
      hostDir: Path,
      cntDir: Option[String] = None,
      readOnly: Boolean = true
  ): DockerBuilder = {
    val target = cntDir.getOrElse(hostDir.toString)
    val ro = Option.when(readOnly)(",readonly").getOrElse("")
    val opt = s"type=bind,source=$hostDir,target=$target${ro}"
    copy(mounts = mounts.append("--mount", opt))
  }

  def withName(containerName: String): DockerBuilder =
    copy(containerName = Some(containerName))

  def build: SysCmd =
    SysCmd(dockerBinary, buildArgs).withTimeout(timeout)

  private def buildArgs: Args =
    Args
      .of(subCmd)
      .append("--rm")
      .option("--name", containerName)
      .append(mounts)
      .option("--network", network)
      .append(env.mapConcat((k, v) => List("--env", s"${k}=${v}")))
      .option("-w", workingDir)
      .appendOpt(imageName)
      .append(cntCmd)
}
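Note: a hedged sketch of how this builder assembles a `docker run` invocation; the paths, image and container names are made-up values:

import fs2.io.file.Path
import docspell.common.Duration

val cmd = DockerBuilder("docker", "run", Duration.minutes(5))
  .withName("my-addon-1")
  .mount(Path("/tmp/work"), Some("/mnt/work"), readOnly = false)
  .workDirectory("/mnt/work")
  .privateNetwork(true) // pure addons are cut off via --network none
  .imageName("my-addon:latest")
  .containerCmd(Seq("addon.sh"))
  .build
// cmd.program is "docker"; cmd.args carry run --rm --name my-addon-1
// --mount … --network none -w /mnt/work my-addon:latest addon.sh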
@@ -0,0 +1,93 @@
/*
 * Copyright 2020 Eike K. & Contributors
 *
 * SPDX-License-Identifier: AGPL-3.0-or-later
 */

package docspell.addons.runner

import cats.data.OptionT
import cats.effect._
import cats.syntax.all._

import docspell.addons.AddonExecutorConfig.DockerConfig
import docspell.addons._
import docspell.common.Duration
import docspell.common.exec.{Env, SysCmd, SysExec}
import docspell.common.util.Random
import docspell.logging.Logger

final class DockerRunner[F[_]: Async](cfg: DockerRunner.Config) extends AddonRunner[F] {

  val runnerType = List(RunnerType.Docker)

  def run(
      logger: Logger[F],
      env: Env,
      ctx: Context
  ) = for {
    _ <- OptionT.whenF(requireBuild(ctx))(build(logger, ctx)).value
    suffix <- Random[F].string(4)
    cmd = createDockerCommand(env, ctx, suffix)
    result <- RunnerUtil.runAddonCommand(logger, cmd, ctx)
  } yield result

  def createDockerCommand(
      env: Env,
      ctx: Context,
      suffix: String
  ): SysCmd = {
    val outputPath = "/mnt/output"
    val cachePath = "/mnt/cache"
    val addonArgs =
      ctx.addonCommand(
        "",
        Duration.zero,
        relativeToBase = true,
        outputPath.some,
        cachePath.some
      )

    DockerBuilder(cfg.docker.dockerBinary, "run", cfg.timeout)
      .withName(ctx.meta.nameAndVersion + "-" + suffix)
      .withEnv(env)
      .withEnv(addonArgs.env)
      .mount(ctx.baseDir, "/mnt/work".some, readOnly = false)
      .mount(ctx.outputDir, outputPath.some, readOnly = false)
      .mount(ctx.cacheDir, cachePath.some, readOnly = false)
      .workDirectory("/mnt/work")
      .privateNetwork(ctx.meta.isPure)
      .imageName(imageName(ctx))
      .containerCmd(addonArgs.args)
      .build
  }

  def build(logger: Logger[F], ctx: Context): F[Unit] =
    for {
      _ <- logger.info(s"Building docker image for addon ${ctx.meta.nameAndVersion}")
      cmd = cfg.docker.dockerBuild(imageName(ctx))
      _ <- SysExec(cmd, logger, ctx.addonDir.some)
        .flatMap(_.logOutputs(logger, "docker build"))
        .use(_.waitFor())
      _ <- logger.info(s"Docker image built successfully")
    } yield ()

  private def requireBuild(ctx: Context) =
    ctx.meta.runner
      .flatMap(_.docker)
      .flatMap(_.image)
      .isEmpty

  private def imageName(ctx: Context): String =
    ctx.meta.runner
      .flatMap(_.docker)
      .flatMap(_.image)
      .getOrElse(s"${ctx.meta.meta.name}:latest")
}

object DockerRunner {
  def apply[F[_]: Async](cfg: AddonExecutorConfig): DockerRunner[F] =
    new DockerRunner[F](Config(cfg.dockerRunner, cfg.runTimeout))

  case class Config(docker: DockerConfig, timeout: Duration)
}
@@ -0,0 +1,83 @@
/*
 * Copyright 2020 Eike K. & Contributors
 *
 * SPDX-License-Identifier: AGPL-3.0-or-later
 */

package docspell.addons.runner

import fs2.io.file.Path

import docspell.common.exec.{Args, Env, SysCmd}

case class NSpawnBuilder(
    child: SysCmd,
    chroot: Path,
    spawnBinary: String = "systemd-nspawn",
    sudoBinary: String = "sudo",
    args: Args = Args.empty,
    env: Env = Env.empty
) {

  def withNSpawnBinary(bin: String): NSpawnBuilder =
    copy(spawnBinary = bin)

  def withSudoBinary(bin: String): NSpawnBuilder =
    copy(sudoBinary = bin)

  def withEnv(key: String, value: String): NSpawnBuilder =
    copy(args = args.append(s"--setenv=$key=$value"))

  def withEnvOpt(key: String, value: Option[String]): NSpawnBuilder =
    value.map(v => withEnv(key, v)).getOrElse(this)

  def withName(containerName: String): NSpawnBuilder =
    copy(args = args.append(s"--machine=$containerName"))

  def mount(
      hostDir: Path,
      cntDir: Option[String] = None,
      readOnly: Boolean = true
  ): NSpawnBuilder = {
    val bind = if (readOnly) "--bind-ro" else "--bind"
    val target = cntDir.map(dir => s":$dir").getOrElse("")
    copy(args = args.append(s"${bind}=${hostDir}${target}"))
  }

  def workDirectory(dir: String): NSpawnBuilder =
    copy(args = args.append(s"--chdir=$dir"))

  def portMap(port: Int): NSpawnBuilder =
    copy(args = args.append("-p", port.toString))

  def privateNetwork(flag: Boolean): NSpawnBuilder =
    if (flag) copy(args = args.append("--private-network"))
    else this

  def build: SysCmd =
    SysCmd(
      program = if (sudoBinary.nonEmpty) sudoBinary else spawnBinary,
      args = buildArgs,
      timeout = child.timeout,
      env = env
    )

  private def buildArgs: Args =
    Args
      .of("--private-users=identity") // can't use -U because need writeable bind mounts
      .append("--notify-ready=yes")
      .append("--ephemeral")
      .append("--as-pid2")
      .append("--console=pipe")
      .append("--no-pager")
      .append("--bind-ro=/bin")
      .append("--bind-ro=/usr/bin")
      .append("--bind-ro=/nix/store")
      .append(s"--directory=$chroot")
      .append(args)
      .append(child.env.map((n, v) => s"--setenv=$n=$v"))
      .prependWhen(sudoBinary.nonEmpty)(spawnBinary)
      .append("--")
      .append(child.program)
      .append(child.args)
}
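Note: the same idea as DockerBuilder, flattened into raw systemd-nspawn flags. A hedged sketch; the binary path and directories are illustrative values:

import fs2.io.file.Path
import docspell.common.exec.{Args, SysCmd}

val child = SysCmd("/mnt/work/addon/run.sh", Args.empty)
val cnt = NSpawnBuilder(child, chroot = Path("/tmp/cnt-root"))
  .withName("my-addon-1")
  .mount(Path("/tmp/work"), Some("/mnt/work"), readOnly = false)
  .workDirectory("/mnt/work")
  .privateNetwork(true)
  .build
// cnt.program is "sudo" (the default); its args embed systemd-nspawn, the
// container flags, then "--" followed by the child program and its args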
@@ -0,0 +1,123 @@
/*
 * Copyright 2020 Eike K. & Contributors
 *
 * SPDX-License-Identifier: AGPL-3.0-or-later
 */

package docspell.addons.runner

import cats.data.OptionT
import cats.effect._
import cats.syntax.all._
import fs2.Stream
import fs2.io.file.{Files, Path}

import docspell.addons.AddonExecutorConfig.{NSpawn, NixConfig}
import docspell.addons._
import docspell.addons.runner.NixFlakeRunner.PreCtx
import docspell.common.Duration
import docspell.common.exec._
import docspell.logging.Logger

final class NixFlakeRunner[F[_]: Async](cfg: NixFlakeRunner.Config)
    extends AddonRunner[F] {

  val runnerType = List(RunnerType.NixFlake)

  def run(
      logger: Logger[F],
      env: Env,
      ctx: Context
  ): F[AddonResult] =
    prepare(logger, ctx)
      .flatMap { preCtx =>
        if (preCtx.nspawnEnabled) runInContainer(logger, env, preCtx, ctx)
        else runOnHost(logger, env, preCtx, ctx)
      }

  def prepare(logger: Logger[F], ctx: Context): F[PreCtx] =
    for {
      _ <- logger.info(s"Prepare addon ${ctx.meta.nameAndVersion} for executing via nix")
      _ <- logger.debug(s"Building with nix build")
      _ <- SysExec(cfg.nixBuild, logger, workdir = ctx.addonDir.some)
        .flatMap(_.logOutputs(logger, "nix build"))
        .use(_.waitFor())
      bin <- findFile(ctx.addonDir / "result" / "bin", ctx.addonDir / "result")
      _ <- logger.debug(s"Build done, found binary: $bin")
      _ <- logger.debug(s"Checking for systemd-nspawn…")
      cnt <- checkContainer(logger)
      _ <-
        if (cnt)
          logger.debug(s"Using systemd-nspawn to run addon in a container.")
        else
          logger.info(s"Running via systemd-nspawn is disabled in the config file")
    } yield PreCtx(cnt, ctx.baseDir.relativize(bin))

  private def checkContainer(logger: Logger[F]): F[Boolean] =
    if (!cfg.nspawn.enabled) false.pure[F]
    else RunnerUtil.checkContainer(logger, cfg.nspawn)

  private def runOnHost(
      logger: Logger[F],
      env: Env,
      preCtx: PreCtx,
      ctx: Context
  ): F[AddonResult] = {
    val cmd =
      SysCmd(preCtx.binary.toString, Args.empty).withTimeout(cfg.timeout).addEnv(env)
    RunnerUtil.runDirectly(logger, ctx)(cmd)
  }

  private def runInContainer(
      logger: Logger[F],
      env: Env,
      preCtx: PreCtx,
      ctx: Context
  ): F[AddonResult] = {
    val cmd = SysCmd(preCtx.binary.toString, Args.empty)
      .withTimeout(cfg.timeout)
      .addEnv(env)
    RunnerUtil.runInContainer(logger, cfg.nspawn, ctx)(cmd)
  }

  /** Find first file, try directories in given order. */
  private def findFile(firstDir: Path, more: Path*): F[Path] = {
    val fail: F[Path] = Sync[F].raiseError(
      new NoSuchElementException(
        s"No file found to execute in ${firstDir :: more.toList}"
      )
    )

    Stream
      .emits(more)
      .cons1(firstDir)
      .flatMap(dir =>
        Files[F]
          .list(dir)
          .evalFilter(p => Files[F].isDirectory(p).map(!_))
          .take(1)
      )
      .take(1)
      .compile
      .last
      .flatMap(_.fold(fail)(Sync[F].pure))
  }
}

object NixFlakeRunner {
  def apply[F[_]: Async](cfg: AddonExecutorConfig): NixFlakeRunner[F] =
    new NixFlakeRunner[F](Config(cfg.nixRunner, cfg.nspawn, cfg.runTimeout))

  case class Config(
      nix: NixConfig,
      nspawn: NSpawn,
      timeout: Duration
  ) {

    val nixBuild =
      SysCmd(nix.nixBinary, Args.of("build")).withTimeout(nix.buildTimeout)

    val nspawnVersion = nspawn.nspawnVersion
  }

  case class PreCtx(nspawnEnabled: Boolean, binary: Path)
}
@@ -0,0 +1,171 @@
/*
 * Copyright 2020 Eike K. & Contributors
 *
 * SPDX-License-Identifier: AGPL-3.0-or-later
 */

package docspell.addons.runner

import cats.data.OptionT
import cats.effect.{Async, Sync}
import cats.syntax.all._
import fs2.Pipe
import fs2.io.file.Files

import docspell.addons._
import docspell.addons.out.AddonOutput
import docspell.common.exec.{SysCmd, SysExec}
import docspell.common.util.Random
import docspell.logging.Logger

import io.circe.{parser => JsonParser}

private[addons] object RunnerUtil {

  /** Run the given `cmd` on this machine.
    *
    * The `cmd` contains a template command to execute the addon. The paths are expected
    * to be relative to the `ctx.baseDir`. Additional arguments and environment
    * variables are added as configured in the addon.
    */
  def runDirectly[F[_]: Async](
      logger: Logger[F],
      ctx: Context
  )(cmd: SysCmd): F[AddonResult] = {
    val addonCmd = ctx
      .addonCommand(cmd.program, cmd.timeout, relativeToBase = false, None, None)
      .withArgs(_.append(cmd.args))
      .addEnv(cmd.env)
    runAddonCommand(logger, addonCmd, ctx)
  }

  /** Run the given `cmd` inside a container via systemd-nspawn.
    *
    * The `cmd` contains a template command to execute the addon. The paths are expected
    * to be relative to the `ctx.baseDir`. Additional arguments and environment
    * variables are added as configured in the addon.
    */
  def runInContainer[F[_]: Async](
      logger: Logger[F],
      cfg: AddonExecutorConfig.NSpawn,
      ctx: Context
  )(cmd: SysCmd): F[AddonResult] = {
    val outputPath = "/mnt/output"
    val cachePath = "/mnt/cache"
    val addonCmd = ctx
      .addonCommand(
        cmd.program,
        cmd.timeout,
        relativeToBase = true,
        outputPath.some,
        cachePath.some
      )
      .withArgs(_.append(cmd.args))
      .addEnv(cmd.env)

    val chroot = ctx.baseDir / "cnt-root"
    val nspawn = NSpawnBuilder(addonCmd, chroot)
      .withNSpawnBinary(cfg.nspawnBinary)
      .withSudoBinary(cfg.sudoBinary)
      .mount(ctx.baseDir, "/mnt/work".some, readOnly = false)
      .mount(ctx.cacheDir, cachePath.some, readOnly = false)
      .mount(ctx.outputDir, outputPath.some, readOnly = false)
      .workDirectory("/mnt/work")
      .withEnv("XDG_RUNTIME_DIR", "/mnt/work")
      .privateNetwork(ctx.meta.isPure)

    for {
      suffix <- Random[F].string(4)
      _ <- List(chroot).traverse_(Files[F].createDirectories)
      res <- runAddonCommand(
        logger,
        nspawn.withName(ctx.meta.nameAndVersion + "-" + suffix).build,
        ctx
      )
      // allow some time to unregister the current container
      // only important when same addons are called in sequence too fast
      _ <- Sync[F].sleep(cfg.containerWait.toScala)
    } yield res
  }

  private def procPipe[F[_]](
      p: String,
      ctx: Context,
      collect: CollectOut[F],
      logger: Logger[F]
  ): Pipe[F, String, Unit] =
    _.through(collect.append)
      .map(line => s">> [${ctx.meta.nameAndVersion} ($p)] $line")
      .evalMap(logger.debug(_))

  /** Runs the external command that is executing the addon.
    *
    * If the addon specifies to collect its output, the stdout is parsed as json and
    * decoded into [[AddonOutput]].
    */
  def runAddonCommand[F[_]: Async](
      logger: Logger[F],
      cmd: SysCmd,
      ctx: Context
  ): F[AddonResult] =
    for {
      stdout <-
        if (ctx.meta.options.exists(_.collectOutput)) CollectOut.buffer[F]
        else CollectOut.none[F].pure[F]
      cmdResult <- SysExec(cmd, logger, ctx.baseDir.some)
        .flatMap(
          _.consumeOutputs(
            procPipe("out", ctx, stdout, logger),
            procPipe("err", ctx, CollectOut.none[F], logger)
          )
        )
        .use(_.waitFor())
        .attempt
      addonResult <- cmdResult match {
        case Right(rc) if rc != 0 =>
          for {
            _ <- logger.error(
              s"Addon ${ctx.meta.nameAndVersion} returned non-zero: $rc"
            )
          } yield AddonResult.executionError(rc)

        case Right(_) =>
          for {
            _ <- logger.debug(s"Addon ${ctx.meta.nameAndVersion} executed successfully!")
            out <- stdout.get
            _ <- logger.debug(s"Addon stdout: $out")
            result = Option
              .when(ctx.meta.options.exists(_.collectOutput) && out.nonEmpty)(
                JsonParser
                  .decode[AddonOutput](out)
                  .fold(AddonResult.decodingError, AddonResult.success)
              )
              .getOrElse(AddonResult.empty)
          } yield result

        case Left(ex) =>
          logger
            .error(ex)(s"Executing external command failed!")
            .as(AddonResult.executionFailed(ex))
      }
    } yield addonResult

  /** Check whether `systemd-nspawn` is available on this machine. */
  def checkContainer[F[_]: Async](
      logger: Logger[F],
      cfg: AddonExecutorConfig.NSpawn
  ): F[Boolean] =
    for {
      rc <- SysExec(cfg.nspawnVersion, logger)
        .flatMap(_.logOutputs(logger, "nspawn"))
        .use(_.waitFor())
      _ <-
        OptionT
          .whenF(rc != 0)(
            logger.warn(
              s"No systemd-nspawn found! Addon is not executed inside a container."
            )
          )
          .value
    } yield rc == 0
}
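Note: the stdout decoding step above in isolation, as a small sketch that is not part of the commit; it assumes an object with an empty files array is a valid, empty AddonOutput (compare the AddonOutputTest further below):

import docspell.addons.out.AddonOutput
import io.circe.{parser => JsonParser}

val decoded = JsonParser.decode[AddonOutput]("""{ "files": [] }""")
// decoded: Either[io.circe.Error, AddonOutput]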
@@ -0,0 +1,78 @@
/*
 * Copyright 2020 Eike K. & Contributors
 *
 * SPDX-License-Identifier: AGPL-3.0-or-later
 */

package docspell.addons.runner

import cats.data.OptionT
import cats.effect._
import cats.kernel.Monoid
import cats.syntax.all._
import fs2.io.file.PosixPermission._
import fs2.io.file.{Files, PosixPermissions}

import docspell.addons.AddonExecutorConfig.NSpawn
import docspell.addons._
import docspell.common.Duration
import docspell.common.exec.{Args, Env, SysCmd}
import docspell.logging.Logger

final class TrivialRunner[F[_]: Async](cfg: TrivialRunner.Config) extends AddonRunner[F] {
  private val sync = Async[F]
  private val files = Files[F]
  implicit val andMonoid: Monoid[Boolean] = Monoid.instance[Boolean](true, _ && _)

  private val executeBits = PosixPermissions(
    OwnerExecute,
    OwnerRead,
    OwnerWrite,
    GroupExecute,
    GroupRead,
    OthersExecute,
    OthersRead
  )

  val runnerType = List(RunnerType.Trivial)

  def run(
      logger: Logger[F],
      env: Env,
      ctx: Context
  ) = {
    val binaryPath = ctx.meta.runner
      .flatMap(_.trivial)
      .map(_.exec)
      .map(bin => ctx.addonDir / bin)

    binaryPath match {
      case None =>
        sync.raiseError(new IllegalStateException("No executable specified in addon!"))

      case Some(file) =>
        val bin = ctx.baseDir.relativize(file)
        val cmd = SysCmd(bin.toString, Args.empty).withTimeout(cfg.timeout).addEnv(env)

        val withNSpawn =
          OptionT
            .whenF(cfg.nspawn.enabled)(RunnerUtil.checkContainer(logger, cfg.nspawn))
            .getOrElse(false)

        files.setPosixPermissions(file, executeBits).attempt *>
          withNSpawn.flatMap {
            case true =>
              RunnerUtil.runInContainer(logger, cfg.nspawn, ctx)(cmd)
            case false =>
              RunnerUtil.runDirectly(logger, ctx)(cmd)
          }
    }
  }
}

object TrivialRunner {
  def apply[F[_]: Async](cfg: AddonExecutorConfig): TrivialRunner[F] =
    new TrivialRunner[F](Config(cfg.nspawn, cfg.runTimeout))

  case class Config(nspawn: NSpawn, timeout: Duration)
}
Binary file not shown.
@@ -0,0 +1,44 @@
/*
 * Copyright 2020 Eike K. & Contributors
 *
 * SPDX-License-Identifier: AGPL-3.0-or-later
 */

package docspell.addons

import cats.effect._
import cats.syntax.option._

import docspell.common.UrlReader
import docspell.logging.TestLoggingConfig

import munit._

class AddonArchiveTest extends CatsEffectSuite with TestLoggingConfig with Fixtures {
  val logger = docspell.logging.getLogger[IO]

  tempDir.test("Read archive from directory") { dir =>
    for {
      archive <- IO(AddonArchive(dummyAddonUrl, "", ""))
      path <- archive.extractTo[IO](UrlReader.defaultReader[IO], dir)

      aa <- AddonArchive.read[IO](dummyAddonUrl, UrlReader.defaultReader[IO], path.some)
      _ = {
        assertEquals(aa.name, "dummy-addon")
        assertEquals(aa.version, "2.9")
        assertEquals(aa.url, dummyAddonUrl)
      }
    } yield ()
  }

  test("Read archive from zip file") {
    for {
      archive <- AddonArchive.read[IO](dummyAddonUrl, UrlReader.defaultReader[IO])
      _ = {
        assertEquals(archive.name, "dummy-addon")
        assertEquals(archive.version, "2.9")
        assertEquals(archive.url, dummyAddonUrl)
      }
    } yield ()
  }
}
@@ -0,0 +1,63 @@
/*
 * Copyright 2020 Eike K. & Contributors
 *
 * SPDX-License-Identifier: AGPL-3.0-or-later
 */

package docspell.addons

import cats.effect._

import docspell.logging.{Level, TestLoggingConfig}

import munit._

class AddonExecutorTest extends CatsEffectSuite with Fixtures with TestLoggingConfig {
  val logger = docspell.logging.getLogger[IO]

  override def docspellLogConfig =
    super.docspellLogConfig.copy(minimumLevel = Level.Trace)

  tempDir.test("select docker if Dockerfile exists") { dir =>
    for {
      _ <- files.createFile(dir / "Dockerfile")
      cfg = testExecutorConfig(
        RunnerType.Docker,
        RunnerType.NixFlake,
        RunnerType.Trivial
      )
      meta = dummyAddonMeta.copy(runner = None)
      r <- AddonExecutor.selectRunner[IO](cfg, meta, dir)
      _ = assertEquals(r.runnerType, List(RunnerType.Docker))
    } yield ()
  }

  tempDir.test("select nix-flake if flake.nix exists") { dir =>
    for {
      _ <- files.createFile(dir / "flake.nix")
      cfg = testExecutorConfig(
        RunnerType.Docker,
        RunnerType.NixFlake,
        RunnerType.Trivial
      )
      meta = dummyAddonMeta.copy(runner = None)
      r <- AddonExecutor.selectRunner[IO](cfg, meta, dir)
      _ = assertEquals(r.runnerType, List(RunnerType.NixFlake))
    } yield ()
  }

  tempDir.test("select nix-flake and docker") { dir =>
    for {
      _ <- files.createFile(dir / "flake.nix")
      _ <- files.createFile(dir / "Dockerfile")
      cfg = testExecutorConfig(
        RunnerType.Docker,
        RunnerType.NixFlake,
        RunnerType.Trivial
      )
      meta = dummyAddonMeta.copy(runner = None)
      r <- AddonExecutor.selectRunner[IO](cfg, meta, dir)
      _ = assertEquals(r.runnerType, List(RunnerType.Docker, RunnerType.NixFlake))
    } yield ()
  }
}
@@ -0,0 +1,37 @@
/*
 * Copyright 2020 Eike K. & Contributors
 *
 * SPDX-License-Identifier: AGPL-3.0-or-later
 */

package docspell.addons

import cats.effect._

import docspell.common.Glob
import docspell.files.Zip
import docspell.logging.TestLoggingConfig

import munit._

class AddonMetaTest extends CatsEffectSuite with TestLoggingConfig with Fixtures {
  val logger = docspell.logging.getLogger[IO]

  test("read meta from zip file") {
    val meta = AddonMeta.findInZip(dummyAddonUrl.readURL[IO](8192))
    assertIO(meta, dummyAddonMeta)
  }

  tempDir.test("read meta from directory") { dir =>
    for {
      _ <- dummyAddonUrl
        .readURL[IO](8192)
        .through(Zip.unzip(8192, Glob.all))
        .through(Zip.saveTo(logger, dir, moveUp = true))
        .compile
        .drain
      meta <- AddonMeta.findInDirectory[IO](dir)
      _ = assertEquals(meta, dummyAddonMeta)
    } yield ()
  }
}
@@ -0,0 +1,40 @@
/*
 * Copyright 2020 Eike K. & Contributors
 *
 * SPDX-License-Identifier: AGPL-3.0-or-later
 */

package docspell.addons

import docspell.addons.out.AddonOutput

import io.circe.parser.decode
import munit.FunSuite

class AddonOutputTest extends FunSuite {

  test("decode empty object") {
    val out = decode[AddonOutput]("{}")
    println(out)
  }

  test("decode sample output") {
    val jsonStr =
      """{ "files": [
        |    {
        |      "itemId": "qZDnyGIAJsXr",
        |      "textFiles": {
        |        "HPFvIDib6eA": "HPFvIDib6eA.txt"
        |      },
        |      "pdfFiles": {
        |        "HPFvIDib6eA": "HPFvIDib6eA.pdf"
        |      }
        |    }
        |  ]
        |}
        |""".stripMargin

    val out = decode[AddonOutput](jsonStr)
    println(out)
  }
}
@@ -0,0 +1,98 @@
/*
 * Copyright 2020 Eike K. & Contributors
 *
 * SPDX-License-Identifier: AGPL-3.0-or-later
 */

package docspell.addons

import java.util.concurrent.atomic.AtomicInteger

import cats.effect.IO
import fs2.io.file.Path

import docspell.common.LenientUri
import docspell.common.exec.Env
import docspell.logging.{Logger, TestLoggingConfig}

import munit._

class AddonRunnerTest extends CatsEffectSuite with TestLoggingConfig {

  val logger = docspell.logging.getLogger[IO]

  val dummyContext = Context(
    addon = AddonRef(AddonArchive(LenientUri.unsafe("http://test"), "", ""), ""),
    meta = AddonMeta.empty("test", "1.0"),
    baseDir = Path(""),
    addonDir = Path(""),
    outputDir = Path(""),
    cacheDir = Path("")
  )

  test("firstSuccessful must stop on first success") {
    val counter = new AtomicInteger(0)
    val runner = new MockRunner(IO(counter.incrementAndGet()).void)
    val r = AddonRunner.firstSuccessful(runner, runner, runner)
    for {
      _ <- r.run(logger, Env.empty, dummyContext)
      _ = assertEquals(counter.get(), 1)
    } yield ()
  }

  test("firstSuccessful must try with next on error") {
    val counter = new AtomicInteger(0)
    val fail = AddonRunner.failWith[IO]("failed")
    val runner: AddonRunner[IO] = new MockRunner(IO(counter.incrementAndGet()).void)
    val r = AddonRunner.firstSuccessful(fail, runner, runner)
    for {
      _ <- r.run(logger, Env.empty, dummyContext)
      _ = assertEquals(counter.get(), 1)
    } yield ()
  }

  test("do not retry on decoding errors") {
    val counter = new AtomicInteger(0)
    val fail = AddonRunner.pure[IO](AddonResult.decodingError("Decoding failed"))
    val increment: AddonRunner[IO] = new MockRunner(IO(counter.incrementAndGet()).void)

    val r = AddonRunner.firstSuccessful(fail, increment, increment)
    for {
      _ <- r.run(logger, Env.empty, dummyContext)
      _ = assertEquals(counter.get(), 0)
    } yield ()
  }

  test("try on errors but stop on decoding error") {
    val counter = new AtomicInteger(0)
    val decodeFail = AddonRunner.pure[IO](AddonResult.decodingError("Decoding failed"))
    val incrementFail =
      new MockRunner(IO(counter.incrementAndGet()).void)
        .as(AddonResult.executionFailed(new Exception("fail")))
    val increment: AddonRunner[IO] = new MockRunner(IO(counter.incrementAndGet()).void)

    val r = AddonRunner.firstSuccessful(
      incrementFail,
      incrementFail,
      decodeFail,
      increment,
      increment
    )
    for {
      _ <- r.run(logger, Env.empty, dummyContext)
      _ = assertEquals(counter.get(), 2)
    } yield ()
  }

  final class MockRunner(run: IO[Unit], result: AddonResult = AddonResult.empty)
      extends AddonRunner[IO] {
    val runnerType = Nil
    def run(
        logger: Logger[IO],
        env: Env,
        ctx: Context
    ) = run.as(result)

    def as(r: AddonResult) = new MockRunner(run, r)
  }
}
@@ -0,0 +1,71 @@
/*
 * Copyright 2020 Eike K. & Contributors
 *
 * SPDX-License-Identifier: AGPL-3.0-or-later
 */

package docspell.addons

import cats.effect._
import cats.syntax.all._
import fs2.io.file.{Files, Path, PosixPermissions}

import docspell.addons.AddonExecutorConfig._
import docspell.addons.AddonMeta._
import docspell.addons.AddonTriggerType._
import docspell.common.{Duration, LenientUri}
import docspell.logging.TestLoggingConfig

import munit.CatsEffectSuite

trait Fixtures extends TestLoggingConfig { self: CatsEffectSuite =>

  val files: Files[IO] = Files[IO]

  val dummyAddonUrl =
    LenientUri.fromJava(getClass.getResource("/docspell-dummy-addon-master.zip"))

  val dummyAddonMeta =
    AddonMeta(
      meta =
        AddonMeta.Meta("dummy-addon", "2.9", "Some dummy addon only for testing.\n".some),
      triggers = Some(
        Set(Scheduled, FinalProcessItem, FinalReprocessItem)
      ),
      None,
      runner = Runner(
        nix = NixRunner(true).some,
        docker = DockerRunner(
          enable = true,
          image = None,
          build = "Dockerfile".some
        ).some,
        trivial = TrivialRunner(true, "src/addon.sh").some
      ).some,
      options = Options(networking = true, collectOutput = true).some
    )

  def baseTempDir: Path =
    Path(s"/tmp/target/test-temp")

  val tempDir =
    ResourceFixture[Path](
      Resource.eval(Files[IO].createDirectories(baseTempDir)) *>
        Files[IO]
          .tempDirectory(baseTempDir.some, "run-", PosixPermissions.fromOctal("777"))
    )

  def testExecutorConfig(
      runner: RunnerType,
      runners: RunnerType*
  ): AddonExecutorConfig = {
    val nspawn = NSpawn(true, "sudo", "systemd-nspawn", Duration.millis(100))
    AddonExecutorConfig(
      runner :: runners.toList,
      Duration.minutes(2),
      nspawn,
      NixConfig("nix", Duration.minutes(2)),
      DockerConfig("docker", Duration.minutes(2))
    )
  }
}
@@ -15,8 +15,8 @@ import fs2.io.file.{Files, Path}
 import docspell.analysis.classifier
 import docspell.analysis.classifier.TextClassifier._
 import docspell.analysis.nlp.Properties
-import docspell.common._
 import docspell.common.syntax.FileSyntax._
+import docspell.common.util.File
 import docspell.logging.Logger
 
 import edu.stanford.nlp.classify.ColumnDataClassifier
@@ -14,6 +14,7 @@ import cats.implicits._
 
 import docspell.analysis.NlpSettings
 import docspell.common._
+import docspell.common.util.File
 
 /** Creating the StanfordCoreNLP pipeline is quite expensive as it involves IO and
   * initializing large objects.
@@ -17,6 +17,7 @@ import fs2.io.file.Files
 
 import docspell.analysis.classifier.TextClassifier.Data
 import docspell.common._
+import docspell.common.util.File
 import docspell.logging.TestLoggingConfig
 
 import munit._
@@ -13,6 +13,7 @@ import cats.effect.unsafe.implicits.global
 
 import docspell.analysis.Env
 import docspell.common._
+import docspell.common.util.File
 import docspell.files.TestFiles
 import docspell.logging.TestLoggingConfig
 
@@ -20,6 +20,7 @@ trait AttachedEvent[R] {
 
 object AttachedEvent {
 
+  /** Only the result, no events. */
   def only[R](v: R): AttachedEvent[R] =
     new AttachedEvent[R] {
       val value = v
@@ -8,11 +8,14 @@ package docspell.backend
 
 import cats.effect._
 
+import docspell.backend.BackendCommands.EventContext
 import docspell.backend.auth.Login
 import docspell.backend.fulltext.CreateIndex
 import docspell.backend.ops._
 import docspell.backend.signup.OSignup
+import docspell.common.bc.BackendCommandRunner
 import docspell.ftsclient.FtsClient
+import docspell.joexapi.client.JoexClient
 import docspell.notification.api.{EventExchange, NotificationModule}
 import docspell.pubsub.api.PubSubT
 import docspell.scheduler.JobStoreModule
@@ -20,6 +23,7 @@ import docspell.store.Store
 import docspell.totp.Totp
 
 import emil.Emil
+import org.http4s.client.Client
 
 trait BackendApp[F[_]] {
 
@@ -35,6 +39,7 @@ trait BackendApp[F[_]] {
   def job: OJob[F]
   def item: OItem[F]
   def itemSearch: OItemSearch[F]
+  def attachment: OAttachment[F]
   def fulltext: OFulltext[F]
   def mail: OMail[F]
   def joex: OJoex[F]
@@ -52,23 +57,30 @@ trait BackendApp[F[_]] {
   def fileRepository: OFileRepository[F]
   def itemLink: OItemLink[F]
   def downloadAll: ODownloadAll[F]
+  def addons: OAddons[F]
+
+  def commands(eventContext: Option[EventContext]): BackendCommandRunner[F, Unit]
 }
 
 object BackendApp {
 
   def create[F[_]: Async](
+      cfg: Config,
       store: Store[F],
      javaEmil: Emil[F],
+      httpClient: Client[F],
       ftsClient: FtsClient[F],
       pubSubT: PubSubT[F],
       schedulerModule: JobStoreModule[F],
       notificationMod: NotificationModule[F]
   ): Resource[F, BackendApp[F]] =
     for {
+      nodeImpl <- ONode(store)
       totpImpl <- OTotp(store, Totp.default)
       loginImpl <- Login[F](store, Totp.default)
       signupImpl <- OSignup[F](store)
-      joexImpl <- OJoex(pubSubT)
+      joexClient = JoexClient(httpClient)
+      joexImpl <- OJoex(pubSubT, nodeImpl, joexClient)
       collImpl <- OCollective[F](
         store,
         schedulerModule.userTasks,
@@ -80,7 +92,6 @@ object BackendApp {
       equipImpl <- OEquipment[F](store)
       orgImpl <- OOrganization(store)
       uploadImpl <- OUpload(store, schedulerModule.jobs)
-      nodeImpl <- ONode(store)
       jobImpl <- OJob(store, joexImpl, pubSubT)
       createIndex <- CreateIndex.resource(ftsClient, store)
       itemImpl <- OItem(store, ftsClient, createIndex, schedulerModule.jobs)
@@ -109,6 +120,16 @@ object BackendApp {
       fileRepoImpl <- OFileRepository(store, schedulerModule.jobs)
       itemLinkImpl <- Resource.pure(OItemLink(store, itemSearchImpl))
       downloadAllImpl <- Resource.pure(ODownloadAll(store, jobImpl, schedulerModule.jobs))
+      attachImpl <- Resource.pure(OAttachment(store, ftsClient, schedulerModule.jobs))
+      addonsImpl <- Resource.pure(
+        OAddons(
+          cfg.addons,
+          store,
+          schedulerModule.userTasks,
+          schedulerModule.jobs,
+          joexImpl
+        )
+      )
     } yield new BackendApp[F] {
       val pubSub = pubSubT
       val login = loginImpl
@@ -139,5 +160,10 @@ object BackendApp {
       val fileRepository = fileRepoImpl
       val itemLink = itemLinkImpl
       val downloadAll = downloadAllImpl
+      val addons = addonsImpl
+      val attachment = attachImpl
+
+      def commands(eventContext: Option[EventContext]) =
+        BackendCommands.fromBackend(this, eventContext)
     }
 }
@@ -0,0 +1,175 @@
/*
 * Copyright 2020 Eike K. & Contributors
 *
 * SPDX-License-Identifier: AGPL-3.0-or-later
 */

package docspell.backend

import cats.data.{NonEmptyList => Nel}
import cats.effect.Sync
import cats.syntax.all._

import docspell.backend.BackendCommands.EventContext
import docspell.backend.ops.OCustomFields.SetValue
import docspell.backend.ops._
import docspell.common.bc._
import docspell.common.{AccountId, Ident, LenientUri}

private[backend] class BackendCommands[F[_]: Sync](
    itemOps: OItem[F],
    attachOps: OAttachment[F],
    fieldOps: OCustomFields[F],
    notificationOps: ONotification[F],
    eventContext: Option[EventContext]
) extends BackendCommandRunner[F, Unit] {
  private[this] val logger = docspell.logging.getLogger[F]

  def run(collective: Ident, cmd: BackendCommand): F[Unit] =
    doRun(collective, cmd).attempt.flatMap {
      case Right(_) => ().pure[F]
      case Left(ex) =>
        logger.error(ex)(s"Backend command $cmd failed for collective ${collective.id}.")
    }

  def doRun(collective: Ident, cmd: BackendCommand): F[Unit] =
    cmd match {
      case BackendCommand.ItemUpdate(item, actions) =>
        actions.traverse_(a => runItemAction(collective, item, a))

      case BackendCommand.AttachmentUpdate(item, attach, actions) =>
        actions.traverse_(a => runAttachAction(collective, item, attach, a))
    }

  def runAll(collective: Ident, cmds: List[BackendCommand]): F[Unit] =
    cmds.traverse_(run(collective, _))

  def runItemAction(collective: Ident, item: Ident, action: ItemAction): F[Unit] =
    action match {
      case ItemAction.AddTags(tags) =>
        logger.debug(s"Setting tags $tags on ${item.id} for ${collective.id}") *>
          itemOps
            .linkTags(item, tags.toList, collective)
            .flatMap(sendEvents)

      case ItemAction.RemoveTags(tags) =>
        logger.debug(s"Remove tags $tags on ${item.id} for ${collective.id}") *>
          itemOps
            .removeTagsMultipleItems(Nel.of(item), tags.toList, collective)
            .flatMap(sendEvents)

      case ItemAction.ReplaceTags(tags) =>
        logger.debug(s"Replace tags $tags on ${item.id} for ${collective.id}") *>
          itemOps
            .setTags(item, tags.toList, collective)
            .flatMap(sendEvents)

      case ItemAction.SetFolder(folder) =>
        logger.debug(s"Set folder $folder on ${item.id} for ${collective.id}") *>
          itemOps
            .setFolder(item, folder, collective)
            .void

      case ItemAction.RemoveTagsCategory(cats) =>
        logger.debug(
          s"Remove tags in categories $cats on ${item.id} for ${collective.id}"
        ) *>
          itemOps
            .removeTagsOfCategories(item, collective, cats)
            .flatMap(sendEvents)

      case ItemAction.SetCorrOrg(id) =>
        logger.debug(
          s"Set correspondent organization ${id.map(_.id)} for ${collective.id}"
        ) *>
          itemOps.setCorrOrg(Nel.of(item), id, collective).void

      case ItemAction.SetCorrPerson(id) =>
        logger.debug(
          s"Set correspondent person ${id.map(_.id)} for ${collective.id}"
        ) *>
          itemOps.setCorrPerson(Nel.of(item), id, collective).void

      case ItemAction.SetConcPerson(id) =>
        logger.debug(
          s"Set concerning person ${id.map(_.id)} for ${collective.id}"
        ) *>
          itemOps.setConcPerson(Nel.of(item), id, collective).void

      case ItemAction.SetConcEquipment(id) =>
        logger.debug(
          s"Set concerning equipment ${id.map(_.id)} for ${collective.id}"
        ) *>
          itemOps.setConcEquip(Nel.of(item), id, collective).void

      case ItemAction.SetField(field, value) =>
        logger.debug(
          s"Set field on item ${item.id} ${field.id} to '$value' for ${collective.id}"
        ) *>
          fieldOps
            .setValue(item, SetValue(field, value, collective))
            .flatMap(sendEvents)

      case ItemAction.SetNotes(notes) =>
        logger.debug(s"Set notes on item ${item.id} for ${collective.id}") *>
          itemOps.setNotes(item, notes, collective).void

      case ItemAction.AddNotes(notes, sep) =>
        logger.debug(s"Add notes on item ${item.id} for ${collective.id}") *>
          itemOps.addNotes(item, notes, sep, collective).void

      case ItemAction.SetName(name) =>
        logger.debug(s"Set name '$name' on item ${item.id} for ${collective.id}") *>
          itemOps.setName(item, name, collective).void
    }

  def runAttachAction(
      collective: Ident,
      itemId: Ident,
      attachId: Ident,
      action: AttachmentAction
  ): F[Unit] =
    action match {
      case AttachmentAction.SetExtractedText(text) =>
        attachOps.setExtractedText(
          collective,
          itemId,
          attachId,
          text.getOrElse("").pure[F]
        )
    }

  private def sendEvents(result: AttachedEvent[_]): F[Unit] =
    eventContext match {
      case Some(ctx) =>
        notificationOps.offerEvents(result.event(ctx.account, ctx.baseUrl))
      case None => ().pure[F]
    }
}

object BackendCommands {

  /** If supplied, notification events will be sent. */
  case class EventContext(account: AccountId, baseUrl: Option[LenientUri])

  def fromBackend[F[_]: Sync](
      backendApp: BackendApp[F],
      eventContext: Option[EventContext] = None
  ): BackendCommandRunner[F, Unit] =
    new BackendCommands[F](
      backendApp.item,
      backendApp.attachment,
      backendApp.customFields,
      backendApp.notification,
      eventContext
    )

  def apply[F[_]: Sync](
      item: OItem[F],
      attachment: OAttachment[F],
      fields: OCustomFields[F],
      notification: ONotification[F],
      eventContext: Option[EventContext] = None
  ): BackendCommandRunner[F, Unit] =
    new BackendCommands[F](item, attachment, fields, notification, eventContext)
}
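Note: a hedged sketch of driving this runner. The constructors mirror the pattern matches above; Ident.unsafe is assumed to exist in docspell.common (LenientUri.unsafe is used the same way in the tests above), and the ids are placeholders:

import docspell.common.Ident
import docspell.common.bc.{BackendCommand, ItemAction}

val collective = Ident.unsafe("my-collective") // assumed helper
val cmd = BackendCommand.ItemUpdate(
  Ident.unsafe("some-item-id"),
  List(ItemAction.SetName("Invoice 2022-42"))
)
// given a runner from BackendCommands.fromBackend(backend):
// runner.run(collective, cmd)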
@@ -20,7 +20,8 @@ case class Config(
     mailDebug: Boolean,
     jdbc: JdbcConfig,
     signup: SignupConfig,
-    files: Config.Files
+    files: Config.Files,
+    addons: Config.Addons
 ) {
 
   def mailSettings: Settings =
@@ -66,4 +67,21 @@ object Config {
       (storesEmpty |+| defaultStorePresent).map(_ => this)
     }
   }
+
+  case class Addons(
+      enabled: Boolean,
+      allowImpure: Boolean,
+      allowedUrls: UrlMatcher,
+      deniedUrls: UrlMatcher
+  ) {
+    def isAllowed(url: LenientUri): Boolean =
+      allowedUrls.matches(url) && !deniedUrls.matches(url)
+
+    def isDenied(url: LenientUri): Boolean =
+      !isAllowed(url)
+  }
+  object Addons {
+    val disabled: Addons =
+      Addons(false, false, UrlMatcher.False, UrlMatcher.True)
+  }
 }
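Note: a small sketch of the allow/deny semantics, not part of the commit; UrlMatcher is assumed to come from docspell.common, with UrlMatcher.True/False being the match-all/match-none instances used by `disabled` above:

import docspell.common.{LenientUri, UrlMatcher}

val addons = Config.Addons(
  enabled = true,
  allowImpure = false,
  allowedUrls = UrlMatcher.True, // allow everything ...
  deniedUrls = UrlMatcher.False // ... and deny nothing
)
addons.isAllowed(LenientUri.unsafe("https://example.com/addon.zip")) // true
// a url is allowed only if it matches the allow list and does not match the deny list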
@@ -16,6 +16,26 @@ import docspell.notification.api.PeriodicQueryArgs
 import docspell.scheduler.Job
 
 object JobFactory extends MailAddressCodec {
+  def existingItemAddon[F[_]: Sync](
+      args: ItemAddonTaskArgs,
+      submitter: AccountId
+  ): F[Job[ItemAddonTaskArgs]] =
+    Job.createNew(
+      ItemAddonTaskArgs.taskName,
+      submitter.collective,
+      args,
+      "Run addons on item",
+      submitter.user,
+      Priority.High,
+      args.addonRunConfigs
+        .map(_.take(23))
+        .toList
+        .sorted
+        .foldLeft(args.itemId)(_ / _)
+        .take(250)
+        .some
+    )
+
   def downloadZip[F[_]: Sync](
       args: DownloadZipArgs,
       summaryId: Ident,
@@ -45,7 +45,14 @@
       chunkSize: Int
   ): F[Unit] = {
     val attachs = store
-      .transact(QAttachment.allAttachmentMetaAndName(collective, itemIds, chunkSize))
+      .transact(
+        QAttachment.allAttachmentMetaAndName(
+          collective,
+          itemIds,
+          ItemState.validStates,
+          chunkSize
+        )
+      )
       .map(caa =>
         TextData
           .attachment(
@@ -0,0 +1,17 @@
/*
 * Copyright 2020 Eike K. & Contributors
 *
 * SPDX-License-Identifier: AGPL-3.0-or-later
 */

package docspell.backend.joex

import fs2.io.file.Path

import docspell.addons.AddonExecutorConfig

final case class AddonEnvConfig(
    workingDir: Path,
    cacheDir: Path,
    executorConfig: AddonExecutorConfig
)
@ -0,0 +1,199 @@
/*
 * Copyright 2020 Eike K. & Contributors
 *
 * SPDX-License-Identifier: AGPL-3.0-or-later
 */

package docspell.backend.joex

import cats.data.OptionT
import cats.effect._
import cats.syntax.all._

import docspell.addons._
import docspell.backend.joex.AddonOps.{AddonRunConfigRef, ExecResult}
import docspell.backend.ops.OAttachment
import docspell.common._
import docspell.common.bc.BackendCommandRunner
import docspell.common.exec.Env
import docspell.logging.Logger
import docspell.scheduler.JobStore
import docspell.store.Store
import docspell.store.file.FileUrlReader
import docspell.store.records.AddonRunConfigResolved

trait AddonOps[F[_]] {

  def execAll(
      collective: Ident,
      trigger: Set[AddonTriggerType],
      runConfigIds: Set[Ident],
      logger: Option[Logger[F]]
  )(
      middleware: Middleware[F]
  ): F[ExecResult]

  def execById(collective: Ident, runConfigId: Ident, logger: Logger[F])(
      middleware: Middleware[F]
  ): F[ExecResult]

  /** Find enabled addon run config references to be executed. Can be additionally
    * filtered by given ids and triggers.
    */
  def findAddonRefs(
      collective: Ident,
      trigger: Set[AddonTriggerType],
      runConfigIds: Set[Ident]
  ): F[List[AddonRunConfigRef]]

  /** Find enabled addon run config reference given an addon task id */
  def findAddonRef(collective: Ident, runConfigId: Ident): F[Option[AddonRunConfigRef]]

  /** Creates an executor for addons given a configuration. */
  def getExecutor(cfg: AddonExecutorConfig): F[AddonExecutor[F]]

}

object AddonOps {
  case class AddonRunConfigRef(
      id: Ident,
      collective: Ident,
      userId: Option[Ident],
      name: String,
      refs: List[AddonRef]
  )

  object AddonRunConfigRef {
    def fromResolved(r: AddonRunConfigResolved): AddonRunConfigRef =
      AddonRunConfigRef(
        r.config.id,
        r.config.cid,
        r.config.userId,
        r.config.name,
        r.refs.map(ref => AddonRef(ref.archive.asArchive, ref.ref.args))
      )
  }

  case class ExecResult(
      result: List[AddonExecutionResult],
      runConfigs: List[AddonRunConfigRef]
  ) {
    lazy val combined = result.combineAll
  }

  object ExecResult {
    def runConfigNotFound(id: Ident): ExecResult =
      ExecResult(
        AddonExecutionResult(
          AddonResult.executionFailed(
            new Exception(s"Addon run config ${id.id} not found.")
          ) :: Nil,
          false
        ) :: Nil,
        Nil
      )
  }

  def apply[F[_]: Async](
      cfg: AddonEnvConfig,
      store: Store[F],
      cmdRunner: BackendCommandRunner[F, Unit],
      attachment: OAttachment[F],
      jobStore: JobStore[F]
  ): AddonOps[F] =
    new AddonOps[F] with LoggerExtension {
      private[this] val logger = docspell.logging.getLogger[F]

      private val urlReader = FileUrlReader(store.fileRepo)
      private val postProcess = AddonPostProcess(cmdRunner, store, attachment, jobStore)
      private val prepare = new AddonPrepare[F](store)

      def execAll(
          collective: Ident,
          trigger: Set[AddonTriggerType],
          runConfigIds: Set[Ident],
          logger: Option[Logger[F]]
      )(
          custom: Middleware[F]
      ): F[ExecResult] =
        for {
          runCfgs <- findAddonRefs(collective, trigger, runConfigIds)
          log = logger.getOrElse(this.logger)
          _ <- log.info(s"Running ${runCfgs.size} addon tasks for trigger $trigger")

          results <- runCfgs.traverse(r => execRunConfig(log, r, custom))
        } yield ExecResult(results.flatMap(_.result), runCfgs)

      def execById(collective: Ident, runConfigId: Ident, logger: Logger[F])(
          custom: Middleware[F]
      ): F[ExecResult] =
        (for {
          runCfg <- OptionT(findAddonRef(collective, runConfigId))
          execRes <- OptionT.liftF(execRunConfig(logger, runCfg, custom))
        } yield execRes).getOrElse(ExecResult.runConfigNotFound(runConfigId))

      def execRunConfig(
          logger: Logger[F],
          runCfg: AddonRunConfigRef,
          custom: Middleware[F]
      ): F[ExecResult] =
        for {
          executor <- getExecutor(cfg.executorConfig)
          log = logger.withRunConfig(runCfg)
          result <-
            Directory.temp(cfg.workingDir, "addon-output-").use { outDir =>
              val cacheDir = cfg.cacheDir / runCfg.id.id
              val inputEnv =
                InputEnv(runCfg.refs, cfg.workingDir, outDir, cacheDir, Env.empty)

              for {
                middleware <- createMiddleware(custom, runCfg)
                res <- middleware(executor.execute(log)).run(inputEnv)
                _ <- log.debug(s"Addon result: $res")
                _ <- postProcess.onResult(log, runCfg.collective, res, outDir)
              } yield res
            }
          execRes = ExecResult(List(result), List(runCfg))
        } yield execRes

      def createMiddleware(custom: Middleware[F], runCfg: AddonRunConfigRef) = for {
        dscMW <- prepare.createDscEnv(runCfg, cfg.executorConfig.runTimeout)
        mm = dscMW >> custom >> prepare.logResult(logger, runCfg) >> Middleware
          .ephemeralRun[F]
      } yield mm

      def getExecutor(cfg: AddonExecutorConfig): F[AddonExecutor[F]] =
        Async[F].pure(AddonExecutor(cfg, urlReader))

      def findAddonRefs(
          collective: Ident,
          trigger: Set[AddonTriggerType],
          runConfigIds: Set[Ident]
      ): F[List[AddonRunConfigRef]] =
        store
          .transact(
            AddonRunConfigResolved.findAllForCollective(
              collective,
              enabled = true.some,
              trigger,
              runConfigIds
            )
          )
          .map(_.map(AddonRunConfigRef.fromResolved))

      def findAddonRef(
          collective: Ident,
          runConfigId: Ident
      ): F[Option[AddonRunConfigRef]] =
        OptionT(
          store
            .transact(
              AddonRunConfigResolved.findById(
                runConfigId,
                collective,
                enabled = Some(true)
              )
            )
        ).map(AddonRunConfigRef.fromResolved).value
    }
}
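
A short usage sketch, not part of this commit: how `execAll` might be invoked. `ops: AddonOps[F]` and `collective: Ident` are assumed to be in scope, `AddonTriggerType.FinalProcessItem` is an assumed trigger value, and `Middleware.identity` is the no-op middleware used elsewhere in this change.

// Sketch: run all enabled run configs of a collective for one trigger.
val run: F[AddonOps.ExecResult] =
  ops.execAll(
    collective,
    Set(AddonTriggerType.FinalProcessItem), // assumed trigger value
    Set.empty,                              // no filtering by run config ids
    None                                    // fall back to the internal logger
  )(Middleware.identity[F])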
@ -0,0 +1,198 @@
/*
 * Copyright 2020 Eike K. & Contributors
 *
 * SPDX-License-Identifier: AGPL-3.0-or-later
 */

package docspell.backend.joex

import cats.data.OptionT
import cats.effect.kernel.Sync
import cats.syntax.all._
import fs2.io.file.{Files, Path}

import docspell.addons._
import docspell.addons.out.{AddonOutput, ItemFile, NewItem}
import docspell.backend.JobFactory
import docspell.backend.ops.OAttachment
import docspell.common._
import docspell.common.bc.BackendCommandRunner
import docspell.files.FileSupport
import docspell.logging.Logger
import docspell.scheduler.JobStore
import docspell.store.Store
import docspell.store.records._

final private[joex] class AddonPostProcess[F[_]: Sync: Files](
    cmdRunner: BackendCommandRunner[F, Unit],
    store: Store[F],
    attachOps: OAttachment[F],
    jobStore: JobStore[F]
) extends FileSupport {

  def onResult(
      logger: Logger[F],
      collective: Ident,
      result: AddonExecutionResult,
      outputDir: Path
  ): F[Unit] =
    result.addonResult match {
      case AddonResult.Success(output) =>
        onSuccess(logger, collective, output, outputDir)
      case _ =>
        ().pure[F]
    }

  def onSuccess(
      logger: Logger[F],
      collective: Ident,
      output: AddonOutput,
      outputDir: Path
  ): F[Unit] =
    for {
      _ <- logger.info("Applying addon output")
      _ <- cmdRunner.runAll(collective, output.commands)
      _ <- logger.debug("Applying changes from files")
      _ <- output.files.traverse_(updateOne(logger, collective, outputDir))
      _ <- output.newItems.traverse_(submitNewItem(logger, collective, outputDir))
    } yield ()

  def submitNewItem(
      logger: Logger[F],
      collective: Ident,
      outputDir: Path
  )(newItem: NewItem): F[Unit] =
    for {
      _ <- logger.info(s"Submit new item with ${newItem.files.size} files")
      files <- newItem.resolveFiles[F](logger, outputDir)
      collLang <- store.transact(RCollective.findLanguage(collective))
      uploaded <- files.traverse(file =>
        file.readAll
          .through(
            store.fileRepo.save(
              collective,
              FileCategory.AttachmentSource,
              MimeTypeHint.filename(file)
            )
          )
          .compile
          .lastOrError
          .map(key => file.fileName.toString -> key)
      )
      _ <- logger.debug(s"Saved ${uploaded.size} files to be processed.")
      args = ProcessItemArgs(
        newItem.toProcessMeta(collective, collLang, "addon"),
        uploaded.map(f => ProcessItemArgs.File(f._1.some, f._2))
      )
      account = AccountId(collective, DocspellSystem.user)
      job <- JobFactory.processItem(args, account, Priority.High, None)
      _ <- jobStore.insert(job.encode)
      _ <- logger.debug(s"Submitted job for processing: ${job.id}")
    } yield ()

  def updateOne(logger: Logger[F], collective: Ident, outputDir: Path)(
      itemFile: ItemFile
  ): F[Unit] =
    for {
      textFiles <- itemFile.resolveTextFiles(logger, outputDir)
      pdfFiles <- itemFile.resolvePdfFiles(logger, outputDir)
      previewFiles <- itemFile.resolvePreviewFiles(logger, outputDir)
      attachs <- OptionT
        .whenF(textFiles.nonEmpty || pdfFiles.nonEmpty || previewFiles.nonEmpty)(
          store.transact(RAttachment.findByItem(itemFile.itemId))
        )
        .getOrElse(Vector.empty)
      _ <- textFiles.traverse_ { case (key, file) =>
        withAttach(logger, key, attachs) { ra =>
          setText(collective, ra, file.readText)
        }
      }
      _ <- pdfFiles.traverse_ { case (key, file) =>
        withAttach(logger, key, attachs) { ra =>
          replacePdf(collective, ra, file, previewFiles.forall(_._1 != key))
        }
      }
      _ <- previewFiles.traverse_ { case (key, file) =>
        withAttach(logger, key, attachs) { ra =>
          replacePreview(collective, ra.id, file)
        }
      }
      _ <- submitNewFiles(logger, collective, outputDir)(itemFile)
    } yield ()

  def submitNewFiles(
      logger: Logger[F],
      collective: Ident,
      outputDir: Path
  )(itemFile: ItemFile): F[Unit] =
    for {
      _ <- logger.info(s"Submitting new file for item")
      collLang <- store.transact(RCollective.findLanguage(collective))
      newFiles <- itemFile.resolveNewFiles(logger, outputDir)
      byMeta = newFiles.groupBy(_._1.metadata).view.mapValues(_.map(_._2))
      account = AccountId(collective, DocspellSystem.user)
      _ <- byMeta.toList.traverse_ { case (meta, files) =>
        for {
          uploaded <- files.traverse(file =>
            file.readAll
              .through(
                store.fileRepo.save(
                  collective,
                  FileCategory.AttachmentSource,
                  MimeTypeHint.filename(file)
                )
              )
              .compile
              .lastOrError
              .map(key => file.fileName.toString -> key)
          )
          args = ProcessItemArgs(
            meta.toProcessMeta(collective, itemFile.itemId, collLang, "addon"),
            uploaded.map(f => ProcessItemArgs.File(f._1.some, f._2))
          )
          job <- JobFactory.processItem(args, account, Priority.High, None)
          _ <- jobStore.insert(job.encode)
          _ <- logger.debug(s"Submitted job for processing: ${job.id}")
        } yield ()
      }
    } yield ()

  private def withAttach(logger: Logger[F], key: String, attachs: Vector[RAttachment])(
      run: RAttachment => F[Unit]
  ): F[Unit] =
    OptionT
      .fromOption(
        attachs.find(a => a.id.id == key || key.toIntOption == a.position.some)
      )
      .semiflatMap(run)
      .getOrElseF(logger.warn(s"Cannot find attachment for $key to update text!"))

  private def setText(collective: Ident, ra: RAttachment, readText: F[String]): F[Unit] =
    attachOps.setExtractedText(collective, ra.itemId, ra.id, readText)

  private def replacePdf(
      collective: Ident,
      ra: RAttachment,
      file: Path,
      generatePreview: Boolean
  ): F[Unit] =
    attachOps.addOrReplacePdf(collective, ra.id, file.readAll, generatePreview)

  private def replacePreview(
      collective: Ident,
      attachId: Ident,
      imageData: Path
  ): F[Unit] =
    attachOps.addOrReplacePreview(collective, attachId, imageData.readAll)
}

object AddonPostProcess {

  def apply[F[_]: Sync: Files](
      cmdRunner: BackendCommandRunner[F, Unit],
      store: Store[F],
      attachment: OAttachment[F],
      jobStore: JobStore[F]
  ): AddonPostProcess[F] =
    new AddonPostProcess[F](cmdRunner, store, attachment, jobStore)
}
@ -0,0 +1,75 @@
/*
 * Copyright 2020 Eike K. & Contributors
 *
 * SPDX-License-Identifier: AGPL-3.0-or-later
 */

package docspell.backend.joex

import cats.data.{Kleisli, OptionT}
import cats.effect._
import cats.syntax.all._

import docspell.addons.Middleware
import docspell.backend.auth.AuthToken
import docspell.backend.joex.AddonOps.AddonRunConfigRef
import docspell.common._
import docspell.logging.Logger
import docspell.store.Store
import docspell.store.records.{RNode, RUser}

import scodec.bits.ByteVector

private[joex] class AddonPrepare[F[_]: Sync](store: Store[F]) extends LoggerExtension {

  def logResult(logger: Logger[F], ref: AddonRunConfigRef): Middleware[F] =
    Middleware(_.mapF(_.attempt.flatTap {
      case Right(_) => ().pure[F]
      case Left(ex) =>
        logger
          .withRunConfig(ref)
          .warn(ex)(s"Addon task '${ref.id.id}' has failed")
    }.rethrow))

  /** Creates environment variables for dsc to connect to the docspell server for the
    * given run config.
    */
  def createDscEnv(
      runConfigRef: AddonRunConfigRef,
      tokenValidity: Duration
  ): F[Middleware[F]] =
    (for {
      userId <- OptionT.fromOption[F](runConfigRef.userId)
      user <- OptionT(store.transact(RUser.getIdByIdOrLogin(userId)))
      account = AccountId(runConfigRef.collective, user.login)
      env =
        Middleware.prepare[F](
          Kleisli(input => makeDscEnv(account, tokenValidity).map(input.addEnv))
        )
    } yield env).getOrElse(Middleware.identity[F])

  /** Creates environment variables to have dsc automatically connect as the given user.
    * Additionally a random rest-server is looked up from the database to set its url.
    */
  def makeDscEnv(
      accountId: AccountId,
      tokenValidity: Duration
  ): F[Map[String, String]] =
    for {
      serverNode <- store.transact(
        RNode
          .findAll(NodeType.Restserver)
          .map(_.sortBy(_.updated).lastOption)
      )
      url = serverNode.map(_.url).map(u => "DSC_DOCSPELL_URL" -> u.asString)
      secret = serverNode.flatMap(_.serverSecret)

      token <- AuthToken.user(
        accountId,
        false,
        secret.getOrElse(ByteVector.empty),
        tokenValidity.some
      )
      session = ("DSC_SESSION" -> token.asString).some
    } yield List(url, session).flatten.toMap
}
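
For orientation, not part of this commit: per the code above, `makeDscEnv` yields at most two entries; both values here are hypothetical.

// Sketch: shape of the environment map when a rest-server node is registered.
val env: Map[String, String] = Map(
  "DSC_DOCSPELL_URL" -> "http://localhost:7880",      // url of a registered node
  "DSC_SESSION" -> "<token from AuthToken.user(...)>" // session for the run config user
)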
@ -0,0 +1,18 @@
/*
 * Copyright 2020 Eike K. & Contributors
 *
 * SPDX-License-Identifier: AGPL-3.0-or-later
 */

package docspell.backend.joex

import docspell.backend.joex.AddonOps.AddonRunConfigRef
import docspell.logging.Logger

trait LoggerExtension {

  implicit final class LoggerDataOps[F[_]](self: Logger[F]) {
    def withRunConfig(t: AddonRunConfigRef): Logger[F] =
      self.capture("addon-task-id", t.id)
  }
}
@ -0,0 +1,48 @@
/*
 * Copyright 2020 Eike K. & Contributors
 *
 * SPDX-License-Identifier: AGPL-3.0-or-later
 */

package docspell.backend.ops

import cats.data.NonEmptyList

import docspell.addons.{AddonArchive, AddonMeta, AddonTriggerType}

sealed trait AddonRunConfigError {
  final def cast: AddonRunConfigError = this

  def toLeft[A]: Either[AddonRunConfigError, A] = Left(this)

  def message: String
}

object AddonRunConfigError {

  case object MissingSchedule extends AddonRunConfigError {
    val message =
      "The run config has a trigger 'scheduled' but doesn't provide a schedule!"
  }

  case object ObsoleteSchedule extends AddonRunConfigError {
    val message = "The run config has a schedule, but not a trigger 'Scheduled'."
  }

  case class MismatchingTrigger(unsupported: NonEmptyList[(String, AddonTriggerType)])
      extends AddonRunConfigError {
    def message: String = {
      val list =
        unsupported.map { case (name, tt) => s"$name: ${tt.name}" }.toList.mkString(", ")
      s"Some listed addons don't support all defined triggers: $list"
    }
  }

  object MismatchingTrigger {
    def apply(addon: AddonMeta, tt: AddonTriggerType): MismatchingTrigger =
      MismatchingTrigger(NonEmptyList.of(addon.nameAndVersion -> tt))

    def apply(addon: AddonArchive, tt: AddonTriggerType): MismatchingTrigger =
      MismatchingTrigger(NonEmptyList.of(addon.nameAndVersion -> tt))
  }
}
@ -0,0 +1,54 @@
/*
 * Copyright 2020 Eike K. & Contributors
 *
 * SPDX-License-Identifier: AGPL-3.0-or-later
 */

package docspell.backend.ops

import cats.data.NonEmptyList
import cats.effect._
import cats.syntax.all._

import docspell.backend.ops.AddonRunConfigError._
import docspell.backend.ops.OAddons.{AddonRunConfigResult, AddonRunInsert}
import docspell.common.Ident
import docspell.store.Store
import docspell.store.records.RAddonArchive

object AddonRunConfigValidate {

  def apply[F[_]: Sync](store: Store[F], cid: Ident)(
      cfg: AddonRunInsert
  ): F[AddonRunConfigResult[AddonRunInsert]] = {
    val init: AddonRunConfigResult[Unit] = ().asRight

    List(
      checkScheduled(cfg).pure[F],
      checkTriggers(store, cid)(cfg)
    )
      .foldLeftM(init)((res, fr) => fr.map(r => res.flatMap(_ => r)))
      .map(_.as(cfg))
  }

  def checkTriggers[F[_]: Sync](store: Store[F], cid: Ident)(
      cfg: AddonRunInsert
  ): F[AddonRunConfigResult[Unit]] =
    for {
      addons <- store.transact(RAddonArchive.findByIds(cid, cfg.addons.map(_.addonId)))
      given = cfg.triggered.toList.toSet
      res = addons
        .flatMap(r => given.diff(r.triggers).map(tt => r.nameAndVersion -> tt))

      maybeError = NonEmptyList
        .fromList(res)
        .map(nel => MismatchingTrigger(nel))
    } yield maybeError.map(_.toLeft).getOrElse(Right(()))

  def checkScheduled(cfg: AddonRunInsert): AddonRunConfigResult[Unit] =
    (cfg.isScheduled, cfg.schedule) match {
      case (true, None)     => MissingSchedule.toLeft[Unit]
      case (false, Some(_)) => ObsoleteSchedule.toLeft[Unit]
      case _                => ().asRight
    }
}
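
A small sketch, not part of this commit, of what `checkScheduled` rejects; `insert` is an assumed `AddonRunInsert` whose triggers include `Scheduled`.

// Sketch: a scheduled trigger without a calendar event fails validation.
val err = AddonRunConfigValidate.checkScheduled(insert.copy(schedule = None))
// err == Left(AddonRunConfigError.MissingSchedule)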
@ -0,0 +1,156 @@
/*
 * Copyright 2020 Eike K. & Contributors
 *
 * SPDX-License-Identifier: AGPL-3.0-or-later
 */

package docspell.backend.ops

import cats.data.EitherT
import cats.effect._
import cats.syntax.all._
import fs2.Stream
import fs2.io.file.Path

import docspell.addons.{AddonMeta, RunnerType}
import docspell.backend.Config
import docspell.backend.ops.AddonValidationError._
import docspell.backend.ops.OAddons.AddonValidationResult
import docspell.common.{Ident, LenientUri, UrlReader}
import docspell.joexapi.model.AddonSupport
import docspell.store.Store
import docspell.store.records.RAddonArchive

final class AddonValidate[F[_]: Async](
    cfg: Config.Addons,
    store: Store[F],
    joexOps: OJoex[F]
) {
  private[this] val logger = docspell.logging.getLogger[F]

  def fromUrl(
      collective: Ident,
      url: LenientUri,
      reader: UrlReader[F],
      localUrl: Option[LenientUri] = None,
      checkExisting: Boolean = true
  ): F[AddonValidationResult[AddonMeta]] =
    if (!cfg.enabled) AddonsDisabled.resultF
    else if (cfg.isDenied(url)) UrlUntrusted(url).resultF
    else if (checkExisting)
      store.transact(RAddonArchive.findByUrl(collective, url)).flatMap {
        case Some(ar) =>
          AddonExists("An addon with this url already exists!", ar).resultF
        case None =>
          archive(collective, reader(localUrl.getOrElse(url)).asRight, checkExisting)
      }
    else archive(collective, reader(localUrl.getOrElse(url)).asRight, checkExisting)

  def archive(
      collective: Ident,
      addonData: Either[Path, Stream[F, Byte]],
      checkExisting: Boolean = true
  ): F[AddonValidationResult[AddonMeta]] =
    (for {
      _ <- EitherT.cond[F](cfg.enabled, (), AddonsDisabled.cast)

      meta <-
        EitherT(
          addonData
            .fold(
              AddonMeta.findInDirectory[F],
              AddonMeta.findInZip[F]
            )
            .attempt
        )
          .leftMap(ex => NotAnAddon(ex).cast)
      _ <- EitherT.cond(
        meta.triggers.exists(_.nonEmpty),
        (),
        InvalidAddon(
          "The addon doesn't define any triggers. At least one is required!"
        ).cast
      )
      _ <- EitherT.cond(
        meta.options.exists(_.isUseful),
        (),
        InvalidAddon(
          "Addon defines no output and no networking. It can't do anything useful."
        ).cast
      )
      _ <- EitherT.cond(cfg.allowImpure || meta.isPure, (), ImpureAddonsDisabled.cast)

      _ <-
        if (checkExisting)
          EitherT(
            store
              .transact(
                RAddonArchive
                  .findByNameAndVersion(collective, meta.meta.name, meta.meta.version)
              )
              .map {
                case Some(ar) => AddonExists(ar).result
                case None     => rightUnit
              }
          )
        else rightUnitT

      joexSupport <- EitherT.liftF(joexOps.getAddonSupport)
      addonRunners <- EitherT.liftF(meta.enabledTypes(addonData))
      _ <- EitherT.liftF(
        logger.info(
          s"Comparing joex support vs addon runner: $joexSupport vs. $addonRunners"
        )
      )
      _ <- EitherT.fromEither(validateJoexSupport(addonRunners, joexSupport))

    } yield meta).value

  private def validateJoexSupport(
      addonRunnerTypes: List[RunnerType],
      joexSupport: List[AddonSupport]
  ): AddonValidationResult[Unit] = {
    val addonRunners = addonRunnerTypes.mkString(", ")
    for {
      _ <- Either.cond(
        joexSupport.nonEmpty,
        (),
        AddonUnsupported("There are no joex nodes that have addons enabled!", Nil).cast
      )
      _ <- Either.cond(
        addonRunners.nonEmpty,
        (),
        InvalidAddon("The addon doesn't enable any runner.")
      )

      ids = joexSupport
        .map(n => n.nodeId -> n.runners.intersect(addonRunnerTypes).toSet)

      unsupportedJoex = ids.filter(_._2.isEmpty).map(_._1)

      _ <- Either.cond(
        ids.forall(_._2.nonEmpty),
        (),
        AddonUnsupported(
          s"A joex node doesn't support this addons runners: $addonRunners. " +
            s"Check: ${unsupportedJoex.map(_.id).mkString(", ")}.",
          unsupportedJoex
        ).cast
      )
    } yield ()
  }

  private def rightUnit: AddonValidationResult[Unit] =
    ().asRight[AddonValidationError]

  private def rightUnitT: EitherT[F, AddonValidationError, Unit] =
    EitherT.fromEither(rightUnit)

  implicit final class ErrorOps(self: AddonValidationError) {
    def result: AddonValidationResult[AddonMeta] =
      self.toLeft

    def resultF: F[AddonValidationResult[AddonMeta]] =
      result.pure[F]
  }
}
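
A usage sketch, not part of this commit: validating an addon zip given as a byte stream while skipping the duplicate check. `validate: AddonValidate[F]`, `collective: Ident` and `zipBytes: fs2.Stream[F, Byte]` are assumed to exist.

// Sketch: Right(...) selects AddonMeta.findInZip for the given stream.
val checked: F[AddonValidationResult[AddonMeta]] =
  validate.archive(collective, Right(zipBytes), checkExisting = false)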
@ -0,0 +1,85 @@
/*
 * Copyright 2020 Eike K. & Contributors
 *
 * SPDX-License-Identifier: AGPL-3.0-or-later
 */

package docspell.backend.ops

import docspell.common.{Ident, LenientUri}
import docspell.store.records.RAddonArchive

import io.circe.generic.extras.Configuration
import io.circe.generic.extras.semiauto.{deriveConfiguredDecoder, deriveConfiguredEncoder}
import io.circe.generic.semiauto.{deriveDecoder, deriveEncoder}
import io.circe.{Decoder, Encoder}

sealed trait AddonValidationError {
  def cast: AddonValidationError = this

  def toLeft[A]: Either[AddonValidationError, A] = Left(this)
}

object AddonValidationError {

  implicit private val throwableDecoder: Decoder[Throwable] =
    Decoder.decodeString.map(new Exception(_))
  implicit private val throwableEncoder: Encoder[Throwable] =
    Encoder.encodeString.contramap(_.getMessage)

  case object AddonsDisabled extends AddonValidationError {}

  case class UrlUntrusted(url: LenientUri) extends AddonValidationError
  object UrlUntrusted {
    implicit val jsonDecoder: Decoder[UrlUntrusted] = deriveDecoder
    implicit val jsonEncoder: Encoder[UrlUntrusted] = deriveEncoder
  }

  case class NotAnAddon(error: Throwable) extends AddonValidationError
  object NotAnAddon {
    implicit val jsonDecoder: Decoder[NotAnAddon] = deriveDecoder
    implicit val jsonEncoder: Encoder[NotAnAddon] = deriveEncoder
  }

  case class AddonUnsupported(message: String, affectedNodes: List[Ident])
      extends AddonValidationError
  object AddonUnsupported {
    implicit val jsonDecoder: Decoder[AddonUnsupported] = deriveDecoder
    implicit val jsonEncoder: Encoder[AddonUnsupported] = deriveEncoder
  }

  case class InvalidAddon(message: String) extends AddonValidationError
  object InvalidAddon {
    implicit val jsonDecoder: Decoder[InvalidAddon] = deriveDecoder
    implicit val jsonEncoder: Encoder[InvalidAddon] = deriveEncoder
  }

  case class AddonExists(message: String, addon: RAddonArchive)
      extends AddonValidationError
  object AddonExists {
    def apply(addon: RAddonArchive): AddonExists =
      AddonExists(s"An addon '${addon.name}/${addon.version}' already exists!", addon)

    implicit val jsonDecoder: Decoder[AddonExists] = deriveDecoder
    implicit val jsonEncoder: Encoder[AddonExists] = deriveEncoder
  }

  case object AddonNotFound extends AddonValidationError

  case class DownloadFailed(error: Throwable) extends AddonValidationError
  object DownloadFailed {
    implicit val jsonDecoder: Decoder[DownloadFailed] = deriveDecoder
    implicit val jsonEncoder: Encoder[DownloadFailed] = deriveEncoder
  }

  case object ImpureAddonsDisabled extends AddonValidationError

  case object RefreshLocalAddon extends AddonValidationError

  implicit val jsonConfig: Configuration =
    Configuration.default.withKebabCaseConstructorNames
      .withDiscriminator("errorType")

  implicit val jsonDecoder: Decoder[AddonValidationError] = deriveConfiguredDecoder
  implicit val jsonEncoder: Encoder[AddonValidationError] = deriveConfiguredEncoder
}
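
An illustration, not part of this commit: with the kebab-case constructor names and the "errorType" discriminator configured above, an error value should encode roughly as sketched; the url value and its exact JSON form are assumptions.

// Sketch: { "errorType": "url-untrusted", "url": "https://example.com/addon.zip" }
import io.circe.syntax._
val json = (UrlUntrusted(url): AddonValidationError).asJson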
@ -0,0 +1,426 @@
/*
 * Copyright 2020 Eike K. & Contributors
 *
 * SPDX-License-Identifier: AGPL-3.0-or-later
 */

package docspell.backend.ops

import cats.data.{EitherT, NonEmptyList, OptionT}
import cats.effect._
import cats.syntax.all._

import docspell.addons.{AddonMeta, AddonTriggerType}
import docspell.backend.ops.AddonValidationError._
import docspell.backend.ops.OAddons._
import docspell.backend.{Config, JobFactory}
import docspell.common._
import docspell.logging.Logger
import docspell.scheduler.JobStore
import docspell.scheduler.usertask.{UserTask, UserTaskScope, UserTaskStore}
import docspell.store.Store
import docspell.store.file.FileUrlReader
import docspell.store.records._

import com.github.eikek.calev.CalEvent

trait OAddons[F[_]] {

  /** Registers a new addon. An error is returned if an addon with this url already
    * exists.
    */
  def registerAddon(
      collective: Ident,
      url: LenientUri,
      logger: Option[Logger[F]]
  ): F[AddonValidationResult[(RAddonArchive, AddonMeta)]]

  /** Refreshes an existing addon by downloading it again and updating metadata. */
  def refreshAddon(
      collective: Ident,
      addonId: Ident
  ): F[AddonValidationResult[(RAddonArchive, AddonMeta)]]

  /** Look into the addon at the given url and return its metadata. */
  def inspectAddon(
      collective: Ident,
      url: LenientUri
  ): F[AddonValidationResult[AddonMeta]]

  /** Deletes the addon if it exists. */
  def deleteAddon(collective: Ident, addonId: Ident): F[Boolean]

  def getAllAddons(collective: Ident): F[List[RAddonArchive]]

  /** Inserts or updates the addon run configuration. If it already exists (and the given
    * id is non empty), it will be completely replaced with the given one.
    */
  def upsertAddonRunConfig(
      collective: Ident,
      runConfig: AddonRunInsert
  ): F[AddonRunConfigResult[Ident]]

  /** Deletes this task from the database. */
  def deleteAddonRunConfig(collective: Ident, runConfigId: Ident): F[Boolean]

  def getAllAddonRunConfigs(collective: Ident): F[List[AddonRunInfo]]

  def runAddonForItem(
      account: AccountId,
      itemIds: NonEmptyList[Ident],
      addonRunConfigIds: Set[Ident]
  ): F[Unit]
}

object OAddons {
  val scheduledAddonTaskName: Ident =
    ScheduledAddonTaskArgs.taskName

  case class AddonRunInsert(
      id: Ident,
      name: String,
      enabled: Boolean,
      userId: Option[Ident],
      schedule: Option[CalEvent],
      triggered: NonEmptyList[AddonTriggerType],
      addons: NonEmptyList[AddonArgs]
  ) {

    def isScheduled: Boolean =
      triggered.exists(_ == AddonTriggerType.Scheduled)
  }
  case class AddonArgs(addonId: Ident, args: String)

  case class AddonRunInfo(
      id: Ident,
      name: String,
      enabled: Boolean,
      userId: Option[Ident],
      schedule: Option[CalEvent],
      triggered: List[AddonTriggerType],
      addons: List[(RAddonArchive, RAddonRunConfigAddon)]
  )
  object AddonRunInfo {
    def fromRunConfigData(
        timer: Option[CalEvent],
        addons: List[(RAddonArchive, RAddonRunConfigAddon)]
    )(t: AddonRunConfigData): AddonRunInfo =
      AddonRunInfo(
        id = t.runConfig.id,
        name = t.runConfig.name,
        enabled = t.runConfig.enabled,
        userId = t.runConfig.userId,
        schedule = timer,
        triggered = t.triggers.map(_.trigger),
        addons = addons
      )
  }

  type AddonRunConfigResult[A] = Either[AddonRunConfigError, A]
  object AddonRunConfigResult {
    def success[A](value: A): AddonRunConfigResult[A] = Right(value)
    def failure[A](error: AddonRunConfigError): AddonRunConfigResult[A] = error.toLeft[A]
  }

  type AddonValidationResult[A] = Either[AddonValidationError, A]
  object AddonValidationResult {
    def success[A](value: A): AddonValidationResult[A] = Right(value)
    def failure[A](error: AddonValidationError): AddonValidationResult[A] = Left(error)
  }

  def apply[F[_]: Async](
      cfg: Config.Addons,
      store: Store[F],
      userTasks: UserTaskStore[F],
      jobStore: JobStore[F],
      joex: OJoex[F]
  ): OAddons[F] =
    new OAddons[F] {
      private[this] val logger = docspell.logging.getLogger[F]
      private val urlReader = FileUrlReader(store.fileRepo)
      private val zip = MimeType.zip.asString
      private val addonValidate = new AddonValidate[F](cfg, store, joex)

      def getAllAddonRunConfigs(collective: Ident): F[List[AddonRunInfo]] =
        for {
          all <- store.transact(AddonRunConfigData.findAll(collective))
          runConfigIDs = all.map(_.runConfig.id).toSet
          archiveIds = all.flatMap(_.addons.map(_.addonId)).distinct
          archives <- NonEmptyList
            .fromList(archiveIds)
            .fold(List.empty[RAddonArchive].pure[F])(ids =>
              store.transact(RAddonArchive.findByIds(collective, ids))
            )
          archivesMap = archives.groupBy(_.id)
          ptask <- userTasks
            .getAll(UserTaskScope.collective(collective))
            .filter(ut => runConfigIDs.contains(ut.id))
            .map(ut => ut.id -> ut)
            .compile
            .toList
            .map(_.toMap)
          result = all.map { t =>
            AddonRunInfo.fromRunConfigData(
              ptask.get(t.runConfig.id).map(_.timer),
              t.addons.map(raa => (archivesMap(raa.addonId).head, raa))
            )(t)
          }
        } yield result

      def upsertAddonRunConfig(
          collective: Ident,
          runConfig: AddonRunInsert
      ): F[AddonRunConfigResult[Ident]] = {
        val insertDataRaw = AddonRunConfigData(
          RAddonRunConfig(
            runConfig.id,
            collective,
            runConfig.userId,
            runConfig.name,
            runConfig.enabled,
            Timestamp.Epoch
          ),
          runConfig.addons.zipWithIndex.map { case (a, index) =>
            RAddonRunConfigAddon(Ident.unsafe(""), runConfig.id, a.addonId, a.args, index)
          }.toList,
          runConfig.triggered
            .map(t => RAddonRunConfigTrigger(Ident.unsafe(""), runConfig.id, t))
            .toList
        )

        val upsert = for {
          userId <-
            OptionT
              .fromOption(runConfig.userId)
              .flatMapF(uid => store.transact(RUser.getIdByIdOrLogin(uid)))
              .map(_.uid)
              .value
          insertData =
            insertDataRaw.copy(runConfig =
              insertDataRaw.runConfig.copy(userId = userId.orElse(runConfig.userId))
            )
          id <-
            OptionT(store.transact(RAddonRunConfig.findById(collective, runConfig.id)))
              .map(rt =>
                AddonRunConfigData(
                  rt.copy(
                    userId = insertData.runConfig.userId,
                    name = insertData.runConfig.name,
                    enabled = insertData.runConfig.enabled
                  ),
                  insertData.addons,
                  insertData.triggers
                )
              )
              .semiflatMap(rt =>
                store.transact(AddonRunConfigData.update(rt).as(rt.runConfig.id))
              )
              .getOrElseF(store.transact(AddonRunConfigData.insert(insertData)))
        } yield id

        EitherT(AddonRunConfigValidate(store, collective)(runConfig))
          .semiflatMap(_ =>
            upsert.flatTap { runConfigId =>
              runConfig.schedule match {
                case Some(timer) =>
                  userTasks.updateTask(
                    UserTaskScope.collective(collective),
                    s"Addon task ${runConfig.name}".some,
                    UserTask(
                      runConfigId,
                      scheduledAddonTaskName,
                      true,
                      timer,
                      s"Running scheduled addon task ${runConfig.name}".some,
                      ScheduledAddonTaskArgs(collective, runConfigId)
                    )
                  )
                case None =>
                  userTasks.deleteTask(UserTaskScope.collective(collective), runConfigId)
              }
            }
          )
          .value
      }

      def deleteAddonRunConfig(collective: Ident, runConfigId: Ident): F[Boolean] = {
        val deleteRunConfig =
          (for {
            e <- OptionT(RAddonRunConfig.findById(collective, runConfigId))
            _ <- OptionT.liftF(RAddonRunConfigAddon.deleteAllForConfig(e.id))
            _ <- OptionT.liftF(RAddonRunConfigTrigger.deleteAllForConfig(e.id))
            _ <- OptionT.liftF(RAddonRunConfig.deleteById(collective, e.id))
          } yield true).getOrElse(false)

        for {
          deleted <- store.transact(deleteRunConfig)
          _ <-
            if (deleted)
              userTasks.deleteTask(UserTaskScope.collective(collective), runConfigId)
            else 0.pure[F]
        } yield deleted
      }

      def getAllAddons(collective: Ident): F[List[RAddonArchive]] =
        store.transact(RAddonArchive.listAll(collective))

      def deleteAddon(collective: Ident, addonId: Ident): F[Boolean] =
        store.transact(RAddonArchive.deleteById(collective, addonId)).map(_ > 0)

      def inspectAddon(
          collective: Ident,
          url: LenientUri
      ): F[AddonValidationResult[AddonMeta]] =
        addonValidate.fromUrl(collective, url, urlReader, checkExisting = false)

      def registerAddon(
          collective: Ident,
          url: LenientUri,
          logger: Option[Logger[F]]
      ): F[AddonValidationResult[(RAddonArchive, AddonMeta)]] = {
        val log = logger.getOrElse(this.logger)
        def validateAndInsert(file: FileKey, localUrl: LenientUri) =
          addonValidate.fromUrl(collective, url, urlReader, localUrl.some).flatMap {
            case Right(meta) =>
              insertAddon(collective, url, meta, file)
                .map(ar => AddonValidationResult.success(ar -> meta))

            case Left(error) =>
              store.fileRepo
                .delete(file)
                .as(AddonValidationResult.failure[(RAddonArchive, AddonMeta)](error))
          }

        log.info(s"Store addon file from '${url.asString} for ${collective.id}") *>
          storeAddonFromUrl(collective, url).flatMapF { file =>
            val localUrl = FileUrlReader.url(file)
            for {
              _ <- log.info(s"Validating addon…")
              res <- validateAndInsert(file, localUrl)
              _ <- log.info(s"Validation result: $res")
            } yield res
          }.value
      }

      def refreshAddon(
          collective: Ident,
          addonId: Ident
      ): F[AddonValidationResult[(RAddonArchive, AddonMeta)]] = {
        val findAddon = store
          .transact(RAddonArchive.findById(collective, addonId))
          .map(_.toRight(AddonNotFound))
        def validateAddon(aa: RAddonArchive): F[AddonValidationResult[AddonMeta]] =
          aa.originalUrl.fold(
            AddonValidationResult.failure[AddonMeta](RefreshLocalAddon).pure[F]
          )(url =>
            addonValidate.fromUrl(collective, url, urlReader, checkExisting = false)
          )

        EitherT(findAddon).flatMap { aa =>
          EitherT(validateAddon(aa))
            .flatMap(meta => refreshAddon(aa, meta).map(na => na -> meta))
        }.value
      }

      private def refreshAddon(
          r: RAddonArchive,
          meta: AddonMeta
      ): EitherT[F, AddonValidationError, RAddonArchive] =
        if (r.isUnchanged(meta)) EitherT.pure(r)
        else
          r.originalUrl match {
            case Some(url) =>
              EitherT(
                store
                  .transact(
                    RAddonArchive
                      .findByNameAndVersion(r.cid, meta.meta.name, meta.meta.version)
                  )
                  .map(
                    _.fold(().asRight[AddonValidationError])(rx => AddonExists(rx).toLeft)
                  )
              ).flatMap(_ =>
                storeAddonFromUrl(r.cid, url).flatMap { file =>
                  val nr = r.update(file, meta)
                  for {
                    _ <- EitherT(
                      store
                        .transact(RAddonArchive.update(nr))
                        .map(_.asRight[AddonValidationError])
                        .recoverWith { case ex =>
                          logger.warn(ex)(s"Storing addon metadata failed.") *>
                            store.fileRepo
                              .delete(file)
                              .as(
                                AddonExists(
                                  s"The addon '${nr.name}/${nr.version}' could not be stored",
                                  nr
                                ).toLeft
                              )
                        }
                    )
                    _ <- EitherT.liftF(store.fileRepo.delete(r.fileId))
                  } yield nr
                }
              )
            case None =>
              EitherT.leftT(RefreshLocalAddon.cast)
          }

      private def insertAddon(
          collective: Ident,
          url: LenientUri,
          meta: AddonMeta,
          file: FileKey
      ): F[RAddonArchive] =
        for {
          now <- Timestamp.current[F]
          aId <- Ident.randomId[F]
          record = RAddonArchive(
            aId,
            collective,
            file,
            url.some,
            meta,
            now
          )
          _ <- store
            .transact(RAddonArchive.insert(record, silent = false))
            .onError(_ => store.fileRepo.delete(file))
        } yield record

      private def storeAddonFromUrl(collective: Ident, url: LenientUri) =
        for {
          urlFile <- EitherT.pure(url.path.segments.lastOption)
          file <- EitherT(
            urlReader(url)
              .through(
                store.fileRepo.save(
                  collective,
                  FileCategory.Addon,
                  MimeTypeHint(urlFile, zip.some)
                )
              )
              .compile
              .lastOrError
              .attempt
              .map(_.leftMap(DownloadFailed(_).cast))
          )
        } yield file

      def runAddonForItem(
          account: AccountId,
          itemIds: NonEmptyList[Ident],
          addonRunConfigIds: Set[Ident]
      ): F[Unit] =
        for {
          jobs <- itemIds.traverse(id =>
            JobFactory.existingItemAddon(
              ItemAddonTaskArgs(account.collective, id, addonRunConfigIds),
              account
            )
          )
          _ <- jobStore.insertAllIfNew(jobs.map(_.encode).toList)
        } yield ()
    }
}
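
A usage sketch, not part of this commit: registering an addon from a url and branching on the validation result. `addons: OAddons[F]`, `collective: Ident`, `uri: LenientUri` and `log` are assumed to be in scope.

// Sketch: install an addon and report the outcome.
addons.registerAddon(collective, uri, logger = None).flatMap {
  case Right((archive, meta)) =>
    log.info(s"Installed addon ${meta.nameAndVersion} as ${archive.id.id}")
  case Left(error) =>
    log.warn(s"Addon rejected: $error")
}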
@ -0,0 +1,223 @@
/*
 * Copyright 2020 Eike K. & Contributors
 *
 * SPDX-License-Identifier: AGPL-3.0-or-later
 */

package docspell.backend.ops

import cats.data.{NonEmptyList => Nel, OptionT}
import cats.effect._
import cats.syntax.all._
import fs2.Stream

import docspell.backend.JobFactory
import docspell.common.MakePreviewArgs.StoreMode
import docspell.common._
import docspell.files.TikaMimetype
import docspell.ftsclient.{FtsClient, TextData}
import docspell.scheduler.JobStore
import docspell.store.Store
import docspell.store.queries.QAttachment
import docspell.store.records._

trait OAttachment[F[_]] {

  def setExtractedText(
      collective: Ident,
      itemId: Ident,
      attachId: Ident,
      newText: F[String]
  ): F[Unit]

  def addOrReplacePdf(
      collective: Ident,
      attachId: Ident,
      pdfData: Stream[F, Byte],
      regeneratePreview: Boolean
  ): F[Unit]

  def addOrReplacePreview(
      collective: Ident,
      attachId: Ident,
      imageData: Stream[F, Byte]
  ): F[Unit]
}

object OAttachment {

  def apply[F[_]: Sync](
      store: Store[F],
      fts: FtsClient[F],
      jobStore: JobStore[F]
  ): OAttachment[F] =
    new OAttachment[F] {
      private[this] val logger = docspell.logging.getLogger[F]

      def setExtractedText(
          collective: Ident,
          itemId: Ident,
          attachId: Ident,
          newText: F[String]
      ): F[Unit] =
        for {
          _ <- logger.info(s"Find attachment ${attachId.id} to update extracted text.")
          cca <- store
            .transact(
              QAttachment
                .allAttachmentMetaAndName(
                  collective.some,
                  Nel.of(itemId).some,
                  ItemState.validStates.append(ItemState.Processing),
                  100
                )
            )
            .filter(_.id == attachId)
            .compile
            .last
          content = cca.find(_.id == attachId)
          _ <- logger.debug(s"Found existing metadata: ${content.isDefined}")
          _ <- OptionT
            .fromOption(content)
            .semiflatMap { cnt =>
              for {
                _ <- logger.debug(s"Setting new extracted text on ${cnt.id.id}")
                text <- newText
                td = TextData.attachment(
                  cnt.item,
                  cnt.id,
                  cnt.collective,
                  cnt.folder,
                  cnt.lang,
                  cnt.name,
                  text.some
                )
                _ <- store.transact(RAttachmentMeta.updateContent(attachId, text))
                _ <- fts.updateIndex(logger, td)
              } yield ()
            }
            .getOrElseF(
              logger.warn(
                s"Item or attachment meta not found to update text: ${itemId.id}"
              )
            )
        } yield ()

      def addOrReplacePdf(
          collective: Ident,
          attachId: Ident,
          pdfData: Stream[F, Byte],
          regeneratePreview: Boolean
      ): F[Unit] = {
        def generatePreview(ra: RAttachment): F[Unit] =
          JobFactory
            .makePreview(MakePreviewArgs(ra.id, StoreMode.Replace), None)
            .map(_.encode)
            .flatMap(jobStore.insert) *>
            logger.info(s"Job submitted to re-generate preview from new pdf")

        def generatePageCount(ra: RAttachment): F[Unit] =
          JobFactory
            .makePageCount(
              MakePageCountArgs(ra.id),
              AccountId(collective, DocspellSystem.user).some
            )
            .map(_.encode)
            .flatMap(jobStore.insert) *>
            logger.info(s"Job submitted to find page count from new pdf")

        def setFile(ra: RAttachment, rs: RAttachmentSource) =
          for {
            _ <- requireMimeType(pdfData, MimeType.pdf)

            newFile <- pdfData
              .through(
                store.fileRepo.save(
                  collective,
                  FileCategory.AttachmentConvert,
                  MimeTypeHint.advertised(MimeType.pdf)
                )
              )
              .compile
              .lastOrError

            _ <- store.transact(RAttachment.updateFileId(attachId, newFile))
            _ <- logger.info(s"Deleting old file for attachment")
            _ <-
              if (rs.fileId == ra.fileId) ().pure[F]
              else store.fileRepo.delete(ra.fileId)
            _ <-
              if (regeneratePreview) generatePreview(ra)
              else ().pure[F]
            _ <- generatePageCount(ra)
          } yield ()

        (for {
          ra <- OptionT(
            store.transact(RAttachment.findByIdAndCollective(attachId, collective))
          )
          rs <- OptionT(
            store.transact(RAttachmentSource.findByIdAndCollective(attachId, collective))
          )
          _ <- OptionT.liftF(setFile(ra, rs))
        } yield ()).getOrElseF(
          logger.warn(
            s"Cannot replace pdf file. Attachment not found for id: ${attachId.id}"
          )
        )
      }

      def addOrReplacePreview(
          collective: Ident,
          attachId: Ident,
          imageData: Stream[F, Byte]
      ): F[Unit] = {
        def setFile(ra: RAttachment): F[Unit] =
          for {
            _ <- requireMimeType(imageData, MimeType.image("*"))
            newFile <- imageData
              .through(
                store.fileRepo
                  .save(collective, FileCategory.PreviewImage, MimeTypeHint.none)
              )
              .compile
              .lastOrError

            now <- Timestamp.current[F]
            record = RAttachmentPreview(ra.id, newFile, None, now)
            oldFile <- store.transact(RAttachmentPreview.upsert(record))
            _ <- OptionT
              .fromOption(oldFile)
              .semiflatMap(store.fileRepo.delete)
              .getOrElse(())
          } yield ()

        (for {
          ra <- OptionT(
            store.transact(RAttachment.findByIdAndCollective(attachId, collective))
          )
          _ <- OptionT.liftF(setFile(ra))
        } yield ()).getOrElseF(
          logger.warn(
            s"Cannot add/replace preview file. Attachment not found for id: ${attachId.id}"
          )
        )
      }
    }

  private def requireMimeType[F[_]: Sync](
      data: Stream[F, Byte],
      expectedMime: MimeType
  ): F[Unit] =
    TikaMimetype
      .detect(data, MimeTypeHint.advertised(expectedMime))
      .flatMap { mime =>
        if (expectedMime.matches(mime)) ().pure[F]
        else
          Sync[F].raiseError(
            new IllegalArgumentException(
              s"Expected pdf file, but got: ${mime.asString}"
            )
          )
      }
}
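
A usage sketch, not part of this commit: replacing the extracted text of a single attachment. `oattach: OAttachment[F]` and the ids are assumed to exist; the new text is passed lazily as an effect so it is only evaluated when the attachment is found.

// Sketch: overwrite extracted text and refresh the full-text index.
val update: F[Unit] =
  oattach.setExtractedText(collective, itemId, attachId, "corrected text".pure[F])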
@ -61,6 +61,12 @@ trait OItem[F[_]] {
       collective: Ident
   ): F[AttachedEvent[UpdateResult]]

+  def removeTagsOfCategories(
+      item: Ident,
+      collective: Ident,
+      categories: Set[String]
+  ): F[AttachedEvent[UpdateResult]]
+
   def removeTagsMultipleItems(
       items: Nel[Ident],
       tags: List[String],
@ -80,11 +86,13 @@ trait OItem[F[_]] {
       collective: Ident
   ): F[UpdateResult]

-  def setFolder(item: Ident, folder: Option[Ident], collective: Ident): F[UpdateResult]
+  /** Set or remove the folder on an item. Folder can be the id or name. */
+  def setFolder(item: Ident, folder: Option[String], collective: Ident): F[UpdateResult]

+  /** Set or remove the folder on multiple items. Folder can be the id or name. */
   def setFolderMultiple(
       items: Nel[Ident],
-      folder: Option[Ident],
+      folder: Option[String],
       collective: Ident
   ): F[UpdateResult]

@ -122,6 +130,13 @@ trait OItem[F[_]] {
   def setNotes(item: Ident, notes: Option[String], collective: Ident): F[UpdateResult]

+  def addNotes(
+      item: Ident,
+      notes: String,
+      separator: Option[String],
+      collective: Ident
+  ): F[UpdateResult]
+
   def setName(item: Ident, name: String, collective: Ident): F[UpdateResult]

   def setNameMultiple(
@ -288,6 +303,28 @@ object OItem {
           }
         }

+      def removeTagsOfCategories(
+          item: Ident,
+          collective: Ident,
+          categories: Set[String]
+      ): F[AttachedEvent[UpdateResult]] =
+        if (categories.isEmpty) {
+          AttachedEvent.only(UpdateResult.success).pure[F]
+        } else {
+          val dbtask =
+            for {
+              tags <- RTag.findByItem(item)
+              removeTags = tags.filter(_.category.exists(categories.contains))
+              _ <- RTagItem.removeAllTags(item, removeTags.map(_.tagId))
+              mkEvent = Event.TagsChanged
+                .partial(Nel.of(item), Nil, removeTags.map(_.tagId.id).toList)
+            } yield AttachedEvent(UpdateResult.success)(mkEvent)
+
+          OptionT(store.transact(RItem.checkByIdAndCollective(item, collective)))
+            .semiflatMap(_ => store.transact(dbtask))
+            .getOrElse(AttachedEvent.only(UpdateResult.notFound))
+        }
+
       def removeTagsMultipleItems(
           items: Nel[Ident],
           tags: List[String],
@ -420,21 +457,27 @@ object OItem {
       def setFolder(
           item: Ident,
-          folder: Option[Ident],
+          folder: Option[String],
           collective: Ident
       ): F[UpdateResult] =
-        UpdateResult
-          .fromUpdate(
-            store
-              .transact(RItem.updateFolder(item, collective, folder))
-          )
-          .flatTap(
-            onSuccessIgnoreError(fts.updateFolder(logger, item, collective, folder))
-          )
+        for {
+          result <- store.transact(RItem.updateFolder(item, collective, folder)).attempt
+          ures = result.fold(
+            UpdateResult.failure,
+            t => UpdateResult.fromUpdateRows(t._1)
+          )
+          _ <- result.fold(
+            _ => ().pure[F],
+            t =>
+              onSuccessIgnoreError(fts.updateFolder(logger, item, collective, t._2))(
+                ures
+              )
+          )
+        } yield ures

       def setFolderMultiple(
           items: Nel[Ident],
-          folder: Option[Ident],
+          folder: Option[String],
           collective: Ident
       ): F[UpdateResult] =
         for {
@ -615,6 +658,33 @@ object OItem {
         }
       )

+      def addNotes(
+          item: Ident,
+          notes: String,
+          separator: Option[String],
+          collective: Ident
+      ): F[UpdateResult] =
+        store
+          .transact(RItem.appendNotes(item, collective, notes, separator))
+          .flatMap {
+            case Some(newNotes) =>
+              store
+                .transact(RCollective.findLanguage(collective))
+                .map(_.getOrElse(Language.English))
+                .flatMap(lang =>
+                  fts.updateItemNotes(logger, item, collective, lang, newNotes.some)
+                )
+                .attempt
+                .flatMap {
+                  case Right(()) => ().pure[F]
+                  case Left(ex) =>
+                    logger.warn(s"Error updating full-text index: ${ex.getMessage}")
+                }
+                .as(UpdateResult.success)
+            case None =>
+              UpdateResult.notFound.pure[F]
+          }
+
       def setName(item: Ident, name: String, collective: Ident): F[UpdateResult] =
         UpdateResult
           .fromUpdate(
@@ -6,11 +6,13 @@
 
 package docspell.backend.ops
 
-import cats.Applicative
 import cats.effect._
-import cats.implicits._
+import cats.syntax.all._
+import fs2.Stream
 
-import docspell.common.Ident
+import docspell.common.{Ident, NodeType}
+import docspell.joexapi.client.JoexClient
+import docspell.joexapi.model.AddonSupport
 import docspell.pubsub.api.PubSubT
 import docspell.scheduler.msg.{CancelJob, JobsNotify, PeriodicTaskNotify}
 
@@ -21,10 +23,16 @@ trait OJoex[F[_]] {
   def notifyPeriodicTasks: F[Unit]
 
   def cancelJob(job: Ident, worker: Ident): F[Unit]
+
+  def getAddonSupport: F[List[AddonSupport]]
 }
 
 object OJoex {
-  def apply[F[_]: Applicative](pubSub: PubSubT[F]): Resource[F, OJoex[F]] =
+  def apply[F[_]: Async](
+      pubSub: PubSubT[F],
+      nodes: ONode[F],
+      joexClient: JoexClient[F]
+  ): Resource[F, OJoex[F]] =
     Resource.pure[F, OJoex[F]](new OJoex[F] {
 
       def notifyAllNodes: F[Unit] =
@@ -35,5 +43,17 @@ object OJoex {
 
       def cancelJob(job: Ident, worker: Ident): F[Unit] =
         pubSub.publish1IgnoreErrors(CancelJob.topic, CancelJob(job, worker)).as(())
+
+      def getAddonSupport: F[List[AddonSupport]] =
+        for {
+          joex <- nodes.getNodes(NodeType.Joex)
+          conc = math.max(2, Runtime.getRuntime.availableProcessors() - 1)
+          supp <- Stream
+            .emits(joex)
+            .covary[F]
+            .parEvalMap(conc)(n => joexClient.getAddonSupport(n.url))
+            .compile
+            .toList
+        } yield supp
     })
 }
@@ -13,11 +13,27 @@ import docspell.common.{Ident, LenientUri, NodeType}
 import docspell.store.Store
 import docspell.store.records.RNode
 
+import scodec.bits.ByteVector
+
 trait ONode[F[_]] {
 
-  def register(appId: Ident, nodeType: NodeType, uri: LenientUri): F[Unit]
+  def register(
+      appId: Ident,
+      nodeType: NodeType,
+      uri: LenientUri,
+      serverSecret: Option[ByteVector]
+  ): F[Unit]
 
   def unregister(appId: Ident): F[Unit]
+
+  def withRegistered(
+      appId: Ident,
+      nodeType: NodeType,
+      uri: LenientUri,
+      serverSecret: Option[ByteVector]
+  ): Resource[F, Unit]
+
+  def getNodes(nodeType: NodeType): F[Vector[RNode]]
 }
 
 object ONode {
@@ -25,9 +41,14 @@ object ONode {
   def apply[F[_]: Async](store: Store[F]): Resource[F, ONode[F]] =
     Resource.pure[F, ONode[F]](new ONode[F] {
       val logger = docspell.logging.getLogger[F]
-      def register(appId: Ident, nodeType: NodeType, uri: LenientUri): F[Unit] =
+      def register(
+          appId: Ident,
+          nodeType: NodeType,
+          uri: LenientUri,
+          serverSecret: Option[ByteVector]
+      ): F[Unit] =
         for {
-          node <- RNode(appId, nodeType, uri)
+          node <- RNode(appId, nodeType, uri, serverSecret)
           _ <- logger.info(s"Registering node ${node.id.id}")
           _ <- store.transact(RNode.set(node))
         } yield ()
@@ -35,6 +56,19 @@ object ONode {
       def unregister(appId: Ident): F[Unit] =
         logger.info(s"Unregister app ${appId.id}") *>
           store.transact(RNode.delete(appId)).map(_ => ())
+
+      def withRegistered(
+          appId: Ident,
+          nodeType: NodeType,
+          uri: LenientUri,
+          serverSecret: Option[ByteVector]
+      ): Resource[F, Unit] =
+        Resource.make(register(appId, nodeType, uri, serverSecret))(_ =>
+          unregister(appId)
+        )
+
+      def getNodes(nodeType: NodeType): F[Vector[RNode]] =
+        store.transact(RNode.findAll(nodeType))
     })
 
 }
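For orientation, a sketch of how the new `withRegistered` resource might be wired at startup so the node entry lives exactly as long as the application; the store value, node id, and URL below are placeholders, not taken from this change:

    import cats.effect.{IO, Resource}
    import docspell.backend.ops.ONode
    import docspell.common.{Ident, LenientUri, NodeType}
    import docspell.store.Store

    // Hypothetical wiring: register this node, unregister on shutdown.
    def registeredNode(store: Store[IO]): Resource[IO, Unit] =
      for {
        onode <- ONode[IO](store)
        _ <- onode.withRegistered(
          Ident.unsafe("joex1"),
          NodeType.Joex,
          LenientUri.unsafe("http://localhost:7878"),
          serverSecret = None
        )
      } yield ()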
@@ -9,8 +9,9 @@ package docspell.common
 import java.time.Instant
 
 import io.circe._
+import scodec.bits.ByteVector
 
-object BaseJsonCodecs {
+trait BaseJsonCodecs {
 
   implicit val encodeInstantEpoch: Encoder[Instant] =
     Encoder.encodeJavaLong.contramap(_.toEpochMilli)
@@ -18,4 +19,11 @@ object BaseJsonCodecs {
   implicit val decodeInstantEpoch: Decoder[Instant] =
     Decoder.decodeLong.map(Instant.ofEpochMilli)
 
+  implicit val byteVectorEncoder: Encoder[ByteVector] =
+    Encoder.encodeString.contramap(_.toBase64)
+
+  implicit val byteVectorDecoder: Decoder[ByteVector] =
+    Decoder.decodeString.emap(ByteVector.fromBase64Descriptive(_))
 }
+
+object BaseJsonCodecs extends BaseJsonCodecs
@@ -18,6 +18,18 @@ final case class Binary[F[_]](name: String, mime: MimeType, data: Stream[F, Byte]) {
 
   def withMime(mime: MimeType): Binary[F] =
     copy(mime = mime)
+
+  /** Return the extension of `name` if available (without the dot) */
+  def extension: Option[String] =
+    name.lastIndexOf('.') match {
+      case n if n > 0 =>
+        Some(name.substring(n + 1))
+      case _ =>
+        None
+    }
+
+  def extensionIn(extensions: Set[String]): Boolean =
+    extension.exists(extensions.contains)
 }
 
 object Binary {
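A quick sketch of the new helpers; the values are illustrative and `MimeType.pdf` is assumed to exist in docspell.common:

    import cats.effect.IO
    import fs2.Stream
    import docspell.common.{Binary, MimeType}

    // extension/extensionIn only inspect the file name, not the data.
    val bin = Binary[IO]("scan.pdf", MimeType.pdf, Stream.empty)
    bin.extension                      // Some("pdf")
    bin.extensionIn(Set("pdf", "png")) // true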
@@ -32,6 +32,7 @@ object FileCategory {
   case object PreviewImage extends FileCategory
   case object Classifier extends FileCategory
   case object DownloadAll extends FileCategory
+  case object Addon extends FileCategory
 
   val all: NonEmptyList[FileCategory] =
     NonEmptyList.of(
@@ -39,7 +40,8 @@ object FileCategory {
       AttachmentConvert,
       PreviewImage,
       Classifier,
-      DownloadAll
+      DownloadAll,
+      Addon
     )
 
   def fromString(str: String): Either[String, FileCategory] =
@@ -32,7 +32,8 @@ object Glob {
     def single(str: String) =
       PatternGlob(Pattern(split(str, separator).map(makeSegment)))
 
-    if (in == "*") all
+    if (in == all.asString) all
+    else if (in == none.asString) none
     else
       split(in, anyChar) match {
         case NonEmptyList(_, Nil) =>
@@ -51,15 +52,25 @@ object Glob {
     val asString = "*"
   }
 
+  val none = new Glob {
+    def matches(caseSensitive: Boolean)(in: String) = false
+    def matchFilenameOrPath(in: String) = false
+    def asString = "!*"
+  }
+
   def pattern(pattern: Pattern): Glob =
     PatternGlob(pattern)
 
   /** A simple glob supporting `*` and `?`. */
   final private case class PatternGlob(pattern: Pattern) extends Glob {
-    def matches(caseSensitive: Boolean)(in: String): Boolean =
+    def matches(caseSensitive: Boolean)(in: String): Boolean = {
+      val input = Glob.split(in, Glob.separator)
+
+      pattern.parts.size == input.size &&
       pattern.parts
-        .zipWith(Glob.split(in, Glob.separator))(_.matches(caseSensitive)(_))
+        .zipWith(input)(_.matches(caseSensitive)(_))
         .forall(identity)
+    }
 
     def matchFilenameOrPath(in: String): Boolean =
       if (pattern.parts.tail.isEmpty) matches(true)(split(in, separator).last)
@@ -67,6 +78,8 @@ object Glob {
 
     def asString: String =
       pattern.asString
+
+    override def toString = s"PatternGlob($asString)"
   }
 
   final private case class AnyGlob(globs: NonEmptyList[Glob]) extends Glob {
@@ -76,6 +89,8 @@ object Glob {
       globs.exists(_.matchFilenameOrPath(in))
     def asString =
       globs.toList.map(_.asString).mkString(anyChar.toString)
+
+    override def toString = s"AnyGlob($globs)"
   }
 
   case class Pattern(parts: NonEmptyList[Segment]) {
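The added size check is what keeps `a/b/*` from matching deeper paths, and the new `Glob.none` value is a matcher that rejects everything. A sketch mirroring the updated test further below:

    import docspell.common.Glob

    Glob("a/b/*").matches(true)("a/b/hello")        // true
    Glob("a/b/*").matches(true)("a/b/hello/bello")  // false: segment counts differ
    Glob("!*").matches(true)("anything")            // false: parses to Glob.none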
@@ -26,6 +26,9 @@ case class Ident(id: String) {
 
   def /(next: Ident): Ident =
     new Ident(id + Ident.concatChar + next.id)
+
+  def take(n: Int): Ident =
+    new Ident(id.take(n))
 }
 
 object Ident {
@@ -0,0 +1,28 @@
+/*
+ * Copyright 2020 Eike K. & Contributors
+ *
+ * SPDX-License-Identifier: AGPL-3.0-or-later
+ */
+
+package docspell.common
+
+import io.circe.generic.semiauto.{deriveDecoder, deriveEncoder}
+import io.circe.{Decoder, Encoder}
+
+/** Arguments to submit a task that runs addons configured for some existing item.
+  *
+  * If `addonRunConfigs` is non-empty, only these addon tasks are run. Otherwise all
+  * addon tasks that are configured for 'existing-item' are run.
+  */
+final case class ItemAddonTaskArgs(
+    collective: Ident,
+    itemId: Ident,
+    addonRunConfigs: Set[Ident]
+)
+
+object ItemAddonTaskArgs {
+  val taskName: Ident = Ident.unsafe("addon-existing-item")
+
+  implicit val jsonDecoder: Decoder[ItemAddonTaskArgs] = deriveDecoder
+  implicit val jsonEncoder: Encoder[ItemAddonTaskArgs] = deriveEncoder
+}
@@ -6,6 +6,8 @@
 
 package docspell.common
 
+import fs2.io.file.Path
+
 case class MimeTypeHint(filename: Option[String], advertised: Option[String]) {
 
   def withName(name: String): MimeTypeHint =
@@ -21,6 +23,9 @@ object MimeTypeHint {
   def filename(name: String): MimeTypeHint =
     MimeTypeHint(Some(name), None)
 
+  def filename(file: Path): MimeTypeHint =
+    filename(file.fileName.toString)
+
   def advertised(mimeType: MimeType): MimeTypeHint =
     advertised(mimeType.asString)
 
@@ -17,7 +17,7 @@ import io.circe.generic.semiauto._
  * This task is run for each new file to create a new item from it or to add this file as
  * an attachment to an existing item.
  *
- * If the `itemId' is set to some value, the item is tried to load to ammend with the
+ * If the `itemId' is set to some value, the item is tried to load to amend with the
  * given files. Otherwise a new item is created.
  *
  * It is also re-used by the 'ReProcessItem' task.
@@ -0,0 +1,19 @@
+/*
+ * Copyright 2020 Eike K. & Contributors
+ *
+ * SPDX-License-Identifier: AGPL-3.0-or-later
+ */
+
+package docspell.common
+
+import io.circe.generic.semiauto.{deriveDecoder, deriveEncoder}
+import io.circe.{Decoder, Encoder}
+
+final case class ScheduledAddonTaskArgs(collective: Ident, addonTaskId: Ident)
+
+object ScheduledAddonTaskArgs {
+  val taskName: Ident = Ident.unsafe("addon-scheduled-task")
+
+  implicit val jsonDecoder: Decoder[ScheduledAddonTaskArgs] = deriveDecoder
+  implicit val jsonEncoder: Encoder[ScheduledAddonTaskArgs] = deriveEncoder
+}
@@ -17,11 +17,23 @@ import cats.implicits._
 import fs2.io.file.Path
 import fs2.{Stream, io, text}
 
+import docspell.common.{exec => newExec}
 import docspell.logging.Logger
 
+// better use `SysCmd` and `SysExec`
 object SystemCommand {
 
-  final case class Config(program: String, args: Seq[String], timeout: Duration) {
+  final case class Config(
+      program: String,
+      args: Seq[String],
+      timeout: Duration,
+      env: Map[String, String] = Map.empty
+  ) {
+
+    def toSysCmd = newExec
+      .SysCmd(program, newExec.Args(args))
+      .withTimeout(timeout)
+      .addEnv(newExec.Env(env))
 
     def mapArgs(f: String => String): Config =
       Config(program, args.map(f), timeout)
@@ -33,6 +45,18 @@ object SystemCommand {
        }
      )
 
+    def withEnv(key: String, value: String): Config =
+      copy(env = env.updated(key, value))
+
+    def addEnv(moreEnv: Map[String, String]): Config =
+      copy(env = env ++ moreEnv)
+
+    def appendArgs(extraArgs: Args): Config =
+      copy(args = args ++ extraArgs.args)
+
+    def appendArgs(extraArgs: Seq[String]): Config =
+      copy(args = args ++ extraArgs)
+
     def toCmd: List[String] =
       program :: args.toList
 
@@ -40,6 +64,45 @@ object SystemCommand {
       toCmd.mkString(" ")
   }
 
+  final case class Args(args: Vector[String]) extends Iterable[String] {
+    override def iterator = args.iterator
+
+    def prepend(a: String): Args = Args(a +: args)
+
+    def prependWhen(flag: Boolean)(a: String): Args =
+      prependOption(Option.when(flag)(a))
+
+    def prependOption(value: Option[String]): Args =
+      value.map(prepend).getOrElse(this)
+
+    def append(a: String, as: String*): Args =
+      Args(args ++ (a +: as.toVector))
+
+    def appendOption(value: Option[String]): Args =
+      value.map(append(_)).getOrElse(this)
+
+    def appendOptionVal(first: String, second: Option[String]): Args =
+      second.map(b => append(first, b)).getOrElse(this)
+
+    def appendWhen(flag: Boolean)(a: String, as: String*): Args =
+      if (flag) append(a, as: _*) else this
+
+    def appendWhenNot(flag: Boolean)(a: String, as: String*): Args =
+      if (!flag) append(a, as: _*) else this
+
+    def append(p: Path): Args =
+      append(p.toString)
+
+    def append(as: Iterable[String]): Args =
+      Args(args ++ as.toVector)
+  }
+  object Args {
+    val empty: Args = Args()
+
+    def apply(as: String*): Args =
+      Args(as.toVector)
+  }
+
   final case class Result(rc: Int, stdout: String, stderr: String)
 
   def exec[F[_]: Sync](
@@ -104,6 +167,10 @@ object SystemCommand {
         .redirectError(Redirect.PIPE)
         .redirectOutput(Redirect.PIPE)
 
+      val pbEnv = pb.environment()
+      cmd.env.foreach { case (key, value) =>
+        pbEnv.put(key, value)
+      }
       wd.map(_.toNioPath.toFile).foreach(pb.directory)
       pb.start()
     }
@@ -0,0 +1,94 @@
+/*
+ * Copyright 2020 Eike K. & Contributors
+ *
+ * SPDX-License-Identifier: AGPL-3.0-or-later
+ */
+
+package docspell.common
+
+import cats.data.NonEmptyList
+import cats.kernel.Monoid
+import cats.syntax.all._
+
+trait UrlMatcher {
+  def matches(url: LenientUri): Boolean
+}
+
+object UrlMatcher {
+  val True = instance(_ => true)
+  val False = instance(_ => false)
+
+  def instance(f: LenientUri => Boolean): UrlMatcher =
+    (url: LenientUri) => f(url)
+
+  def fromString(str: String): Either[String, UrlMatcher] =
+    if (str == "") False.asRight
+    else if (str == "*") True.asRight
+    else LenientUri.parse(str).map(fromUrl)
+
+  def unsafeFromString(str: String): UrlMatcher =
+    fromString(str).fold(sys.error, identity)
+
+  def fromStringList(str: List[String]): Either[String, UrlMatcher] =
+    str match {
+      case Nil => False.asRight
+      case _   => str.map(_.trim).traverse(fromString).map(_.combineAll)
+    }
+
+  def fromUrl(url: LenientUri): UrlMatcher = {
+    val schemeGlob = Glob(url.scheme.head)
+    val hostGlob = HostGlob(url.host)
+    val pathGlob = Glob(url.path.asString)
+    new Impl(schemeGlob, hostGlob, pathGlob, url.path.segments.size)
+  }
+
+  def any(ulrm: IterableOnce[UrlMatcher]): UrlMatcher =
+    anyMonoid.combineAll(ulrm)
+
+  def all(urlm: IterableOnce[UrlMatcher]): UrlMatcher =
+    allMonoid.combineAll(urlm)
+
+  val anyMonoid: Monoid[UrlMatcher] =
+    Monoid.instance(False, (a, b) => instance(url => a.matches(url) || b.matches(url)))
+
+  val allMonoid: Monoid[UrlMatcher] =
+    Monoid.instance(True, (a, b) => instance(url => a.matches(url) && b.matches(url)))
+
+  implicit val defaultMonoid: Monoid[UrlMatcher] = anyMonoid
+
+  private class Impl(scheme: Glob, host: HostGlob, path: Glob, pathSegmentCount: Int)
+      extends UrlMatcher {
+    def matches(url: LenientUri) = {
+      // strip path to only match prefixes
+      val mPath: LenientUri.Path =
+        NonEmptyList.fromList(url.path.segments.take(pathSegmentCount)) match {
+          case Some(nel) => LenientUri.NonEmptyPath(nel)
+          case None      => LenientUri.RootPath
+        }
+
+      url.scheme.forall(scheme.matches(false)) &&
+      host.matches(url.host) &&
+      path.matchFilenameOrPath(mPath.asString)
+    }
+  }
+
+  private class HostGlob(glob: Option[Glob]) {
+    def matches(host: Option[String]): Boolean =
+      (glob, host) match {
+        case (Some(pattern), Some(word)) =>
+          pattern.matches(false)(HostGlob.prepareHost(word))
+        case (None, None) => true
+        case _            => false
+      }
+
+    override def toString = s"HostGlob(${glob.map(_.asString)})"
+  }
+
+  private object HostGlob {
+    def apply(hostPattern: Option[String]): HostGlob =
+      new HostGlob(hostPattern.map(p => Glob(prepareHost(p))))
+
+    private def prepareHost(host: String): String =
+      host.replace('.', '/')
+  }
+}
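A sketch of how these matchers compose, with made-up URLs: scheme, host (dots treated as separators) and path prefix are each matched with globs, and the default monoid is logical OR:

    import docspell.common.{LenientUri, UrlMatcher}

    val gh = UrlMatcher.unsafeFromString("https://github.com/docspell/*")
    gh.matches(LenientUri.unsafe("https://github.com/docspell/dsc")) // true
    gh.matches(LenientUri.unsafe("https://gitlab.com/docspell/dsc")) // false

    // Combine matchers; `any` ORs them together.
    val allowed = UrlMatcher.any(
      List(gh, UrlMatcher.unsafeFromString("*s://test.com/*"))
    )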
@@ -0,0 +1,35 @@
+/*
+ * Copyright 2020 Eike K. & Contributors
+ *
+ * SPDX-License-Identifier: AGPL-3.0-or-later
+ */
+
+package docspell.common
+
+import cats.ApplicativeError
+import cats.effect._
+import fs2.Stream
+
+trait UrlReader[F[_]] {
+  def apply(url: LenientUri): Stream[F, Byte]
+}
+
+object UrlReader {
+
+  def instance[F[_]](f: LenientUri => Stream[F, Byte]): UrlReader[F] =
+    (url: LenientUri) => f(url)
+
+  def failWith[F[_]](
+      message: String
+  )(implicit F: ApplicativeError[F, Throwable]): UrlReader[F] =
+    instance(url =>
+      Stream.raiseError(
+        new IllegalStateException(s"Unable to read '${url.asString}': $message")
+      )
+    )
+
+  def apply[F[_]](implicit r: UrlReader[F]): UrlReader[F] = r
+
+  implicit def defaultReader[F[_]: Sync]: UrlReader[F] =
+    instance(_.readURL[F](8192))
+}
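A sketch of the intended use, assuming a cats-effect IO runtime; the URL is made up. The implicit default reader streams the bytes behind a URL via `LenientUri.readURL`:

    import cats.effect.IO
    import docspell.common.{LenientUri, UrlReader}

    // Summon the implicit default reader and count the bytes behind a URL.
    val bytes = UrlReader[IO].apply(LenientUri.unsafe("https://example.com/addon.zip"))
    val size: IO[Long] = bytes.compile.count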
@@ -0,0 +1,30 @@
+/*
+ * Copyright 2020 Eike K. & Contributors
+ *
+ * SPDX-License-Identifier: AGPL-3.0-or-later
+ */
+
+package docspell.common.bc
+
+import io.circe.generic.extras.Configuration
+import io.circe.generic.extras.semiauto.{deriveConfiguredDecoder, deriveConfiguredEncoder}
+import io.circe.generic.semiauto.{deriveDecoder, deriveEncoder}
+import io.circe.{Decoder, Encoder}
+
+sealed trait AttachmentAction {}
+
+object AttachmentAction {
+
+  implicit val deriveConfig: Configuration =
+    Configuration.default.withDiscriminator("action").withKebabCaseConstructorNames
+
+  case class SetExtractedText(text: Option[String]) extends AttachmentAction
+  object SetExtractedText {
+    implicit val jsonDecoder: Decoder[SetExtractedText] = deriveDecoder
+    implicit val jsonEncoder: Encoder[SetExtractedText] = deriveEncoder
+  }
+
+  implicit val jsonDecoder: Decoder[AttachmentAction] = deriveConfiguredDecoder
+  implicit val jsonEncoder: Encoder[AttachmentAction] = deriveConfiguredEncoder
+
+}
@@ -0,0 +1,44 @@
+/*
+ * Copyright 2020 Eike K. & Contributors
+ *
+ * SPDX-License-Identifier: AGPL-3.0-or-later
+ */
+
+package docspell.common.bc
+
+import docspell.common.Ident
+
+import io.circe.generic.extras.Configuration
+import io.circe.generic.extras.semiauto.{deriveConfiguredDecoder, deriveConfiguredEncoder}
+import io.circe.generic.semiauto.{deriveDecoder, deriveEncoder}
+import io.circe.{Decoder, Encoder}
+
+sealed trait BackendCommand {}
+
+object BackendCommand {
+
+  implicit val deriveConfig: Configuration =
+    Configuration.default.withDiscriminator("command").withKebabCaseConstructorNames
+
+  case class ItemUpdate(itemId: Ident, actions: List[ItemAction]) extends BackendCommand
+  object ItemUpdate {
+    implicit val jsonDecoder: Decoder[ItemUpdate] = deriveDecoder
+    implicit val jsonEncoder: Encoder[ItemUpdate] = deriveEncoder
+  }
+
+  def item(itemId: Ident, actions: List[ItemAction]): BackendCommand =
+    ItemUpdate(itemId, actions)
+
+  case class AttachmentUpdate(
+      itemId: Ident,
+      attachId: Ident,
+      actions: List[AttachmentAction]
+  ) extends BackendCommand
+  object AttachmentUpdate {
+    implicit val jsonDecoder: Decoder[AttachmentUpdate] = deriveDecoder
+    implicit val jsonEncoder: Encoder[AttachmentUpdate] = deriveEncoder
+  }
+
+  implicit val jsonDecoder: Decoder[BackendCommand] = deriveConfiguredDecoder
+  implicit val jsonEncoder: Encoder[BackendCommand] = deriveConfiguredEncoder
+}
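To make the shape concrete, a sketch of building such a command; the item id and values are invented, and the resulting JSON form is shown in the test further below:

    import docspell.common.Ident
    import docspell.common.bc.{BackendCommand, ItemAction}

    // Batch several item edits into one command for a BackendCommandRunner.
    val cmd: BackendCommand =
      BackendCommand.item(
        Ident.unsafe("item-id"),
        List(
          ItemAction.SetName("Invoice 2022-05"),
          ItemAction.AddTags(Set("invoice"))
        )
      )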
@@ -0,0 +1,17 @@
+/*
+ * Copyright 2020 Eike K. & Contributors
+ *
+ * SPDX-License-Identifier: AGPL-3.0-or-later
+ */
+
+package docspell.common.bc
+
+import docspell.common.Ident
+
+trait BackendCommandRunner[F[_], A] {
+
+  def run(collective: Ident, cmd: BackendCommand): F[A]
+
+  def runAll(collective: Ident, cmds: List[BackendCommand]): F[A]
+
+}
@@ -0,0 +1,102 @@
+/*
+ * Copyright 2020 Eike K. & Contributors
+ *
+ * SPDX-License-Identifier: AGPL-3.0-or-later
+ */
+
+package docspell.common.bc
+
+import docspell.common.Ident
+
+import io.circe.generic.extras.Configuration
+import io.circe.generic.extras.semiauto.{deriveConfiguredDecoder, deriveConfiguredEncoder}
+import io.circe.generic.semiauto.{deriveDecoder, deriveEncoder}
+import io.circe.{Decoder, Encoder}
+
+sealed trait ItemAction {}
+
+object ItemAction {
+  implicit val deriveConfig: Configuration =
+    Configuration.default.withDiscriminator("action").withKebabCaseConstructorNames
+
+  case class AddTags(tags: Set[String]) extends ItemAction
+  object AddTags {
+    implicit val jsonDecoder: Decoder[AddTags] = deriveDecoder
+    implicit val jsonEncoder: Encoder[AddTags] = deriveEncoder
+  }
+
+  case class ReplaceTags(tags: Set[String]) extends ItemAction
+  object ReplaceTags {
+    implicit val jsonDecoder: Decoder[ReplaceTags] = deriveDecoder
+    implicit val jsonEncoder: Encoder[ReplaceTags] = deriveEncoder
+  }
+
+  case class RemoveTags(tags: Set[String]) extends ItemAction
+  object RemoveTags {
+    implicit val jsonDecoder: Decoder[RemoveTags] = deriveDecoder
+    implicit val jsonEncoder: Encoder[RemoveTags] = deriveEncoder
+  }
+
+  case class RemoveTagsCategory(categories: Set[String]) extends ItemAction
+  object RemoveTagsCategory {
+    implicit val jsonDecoder: Decoder[RemoveTagsCategory] = deriveDecoder
+    implicit val jsonEncoder: Encoder[RemoveTagsCategory] = deriveEncoder
+  }
+
+  case class SetFolder(folder: Option[String]) extends ItemAction
+  object SetFolder {
+    implicit val jsonDecoder: Decoder[SetFolder] = deriveDecoder
+    implicit val jsonEncoder: Encoder[SetFolder] = deriveEncoder
+  }
+
+  case class SetCorrOrg(id: Option[Ident]) extends ItemAction
+  object SetCorrOrg {
+    implicit val jsonDecoder: Decoder[SetCorrOrg] = deriveDecoder
+    implicit val jsonEncoder: Encoder[SetCorrOrg] = deriveEncoder
+  }
+
+  case class SetCorrPerson(id: Option[Ident]) extends ItemAction
+  object SetCorrPerson {
+    implicit val jsonDecoder: Decoder[SetCorrPerson] = deriveDecoder
+    implicit val jsonEncoder: Encoder[SetCorrPerson] = deriveEncoder
+  }
+
+  case class SetConcPerson(id: Option[Ident]) extends ItemAction
+  object SetConcPerson {
+    implicit val jsonDecoder: Decoder[SetConcPerson] = deriveDecoder
+    implicit val jsonEncoder: Encoder[SetConcPerson] = deriveEncoder
+  }
+
+  case class SetConcEquipment(id: Option[Ident]) extends ItemAction
+  object SetConcEquipment {
+    implicit val jsonDecoder: Decoder[SetConcEquipment] = deriveDecoder
+    implicit val jsonEncoder: Encoder[SetConcEquipment] = deriveEncoder
+  }
+
+  case class SetField(field: Ident, value: String) extends ItemAction
+  object SetField {
+    implicit val jsonDecoder: Decoder[SetField] = deriveDecoder
+    implicit val jsonEncoder: Encoder[SetField] = deriveEncoder
+  }
+
+  case class SetName(name: String) extends ItemAction
+  object SetName {
+    implicit val jsonDecoder: Decoder[SetName] = deriveDecoder
+    implicit val jsonEncoder: Encoder[SetName] = deriveEncoder
+  }
+
+  case class SetNotes(notes: Option[String]) extends ItemAction
+  object SetNotes {
+    implicit val jsonDecoder: Decoder[SetNotes] = deriveDecoder
+    implicit val jsonEncoder: Encoder[SetNotes] = deriveEncoder
+  }
+
+  case class AddNotes(notes: String, separator: Option[String]) extends ItemAction
+  object AddNotes {
+    implicit val jsonDecoder: Decoder[AddNotes] = deriveDecoder
+    implicit val jsonEncoder: Encoder[AddNotes] = deriveEncoder
+  }
+
+  implicit val jsonDecoder: Decoder[ItemAction] = deriveConfiguredDecoder
+  implicit val jsonEncoder: Encoder[ItemAction] = deriveConfiguredEncoder
+}
@@ -0,0 +1,49 @@
+/*
+ * Copyright 2020 Eike K. & Contributors
+ *
+ * SPDX-License-Identifier: AGPL-3.0-or-later
+ */
+
+package docspell.common.exec
+
+import fs2.io.file.Path
+
+case class Args(values: Seq[String]) {
+
+  def option(key: String, value: String): Args =
+    Args(values ++ Seq(key, value))
+
+  def option(key: String, value: Option[String]): Args =
+    value.map(v => option(key, v)).getOrElse(this)
+
+  def appendOpt(v: Option[String]): Args =
+    v.map(e => Args(values :+ e)).getOrElse(this)
+
+  def append(v: String, vs: String*): Args =
+    Args(values ++ (v +: vs))
+
+  def append(path: Path): Args =
+    append(path.toString)
+
+  def append(args: Args): Args =
+    Args(values ++ args.values)
+
+  def append(args: Seq[String]): Args =
+    Args(values ++ args)
+
+  def prepend(v: String): Args =
+    Args(v +: values)
+
+  def prependWhen(flag: Boolean)(v: String) =
+    if (flag) prepend(v) else this
+
+  def cmdString: String =
+    values.mkString(" ")
+}
+
+object Args {
+  val empty: Args = Args(Seq.empty)
+
+  def of(v: String*): Args =
+    Args(v)
+}
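A sketch of how the option helpers behave; the flags themselves are invented:

    import docspell.common.exec.Args

    // Optional values drop out cleanly when absent.
    val args = Args
      .of("-y", "--batch")
      .option("--lang", Some("deu"))
      .option("--dpi", None) // no-op
      .append("in.pdf")

    args.cmdString // "-y --batch --lang deu in.pdf"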
modules/common/src/main/scala/docspell/common/exec/Env.scala (new file, 37 lines)
@@ -0,0 +1,37 @@
+/*
+ * Copyright 2020 Eike K. & Contributors
+ *
+ * SPDX-License-Identifier: AGPL-3.0-or-later
+ */
+
+package docspell.common.exec
+
+case class Env(values: Map[String, String]) {
+
+  def add(name: String, value: String): Env =
+    copy(values.updated(name, value))
+
+  def addAll(v: Map[String, String]): Env =
+    Env(values ++ v)
+
+  def addAll(e: Env): Env =
+    Env(values ++ e.values)
+
+  def ++(e: Env) = addAll(e)
+
+  def foreach(f: (String, String) => Unit): Unit =
+    values.foreach(t => f(t._1, t._2))
+
+  def map[A](f: (String, String) => A): Seq[A] =
+    values.map(f.tupled).toSeq
+
+  def mapConcat[A](f: (String, String) => Seq[A]): Seq[A] =
+    values.flatMap(f.tupled).toSeq
+}
+
+object Env {
+  val empty: Env = Env(Map.empty)
+
+  def of(nv: (String, String)*): Env =
+    Env(Map(nv: _*))
+}
@@ -0,0 +1,43 @@
+/*
+ * Copyright 2020 Eike K. & Contributors
+ *
+ * SPDX-License-Identifier: AGPL-3.0-or-later
+ */
+
+package docspell.common.exec
+
+import docspell.common._
+
+final case class SysCmd(
+    program: String,
+    args: Args,
+    env: Env,
+    timeout: Duration
+) {
+
+  def withArgs(f: Args => Args): SysCmd =
+    copy(args = f(args))
+
+  def withTimeout(to: Duration): SysCmd =
+    copy(timeout = to)
+
+  def withEnv(f: Env => Env): SysCmd =
+    copy(env = f(env))
+
+  def addEnv(env: Env): SysCmd =
+    withEnv(_.addAll(env))
+
+  def cmdString: String =
+    s"$program ${args.cmdString}"
+
+  private[exec] def toCmd: Seq[String] =
+    program +: args.values
+}
+
+object SysCmd {
+  def apply(prg: String, args: String*): SysCmd =
+    apply(prg, Args(args))
+
+  def apply(prg: String, args: Args): SysCmd =
+    SysCmd(prg, args, Env.empty, Duration.minutes(2))
+}
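A sketch of describing a command as pure data; the program name and environment variable are illustrative, and nothing runs until the command is handed to `SysExec` below:

    import docspell.common.Duration
    import docspell.common.exec.{Args, Env, SysCmd}

    val cmd = SysCmd("ocrmypdf", Args.of("--skip-text", "in.pdf", "out.pdf"))
      .withTimeout(Duration.minutes(5))
      .addEnv(Env.of("OMP_THREAD_LIMIT" -> "1"))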
modules/common/src/main/scala/docspell/common/exec/SysExec.scala (new file, 163 lines)
@@ -0,0 +1,163 @@
+/*
+ * Copyright 2020 Eike K. & Contributors
+ *
+ * SPDX-License-Identifier: AGPL-3.0-or-later
+ */
+
+package docspell.common.exec
+
+import java.lang.ProcessBuilder.Redirect
+import java.util.concurrent.TimeUnit
+
+import scala.concurrent.TimeoutException
+import scala.jdk.CollectionConverters._
+
+import cats.effect._
+import cats.syntax.all._
+import fs2.io.file.Path
+import fs2.{Pipe, Stream}
+
+import docspell.common.Duration
+import docspell.logging.Logger
+
+trait SysExec[F[_]] {
+
+  def stdout: Stream[F, Byte]
+
+  def stdoutLines: Stream[F, String] =
+    stdout
+      .through(fs2.text.utf8.decode)
+      .through(fs2.text.lines)
+
+  def stderr: Stream[F, Byte]
+
+  def stderrLines: Stream[F, String] =
+    stderr
+      .through(fs2.text.utf8.decode)
+      .through(fs2.text.lines)
+
+  def waitFor(timeout: Option[Duration] = None): F[Int]
+
+  /** Sends a signal to the process to terminate it immediately */
+  def cancel: F[Unit]
+
+  /** Consume lines of output of the process in background. */
+  def consumeOutputs(out: Pipe[F, String, Unit], err: Pipe[F, String, Unit])(implicit
+      F: Async[F]
+  ): Resource[F, SysExec[F]]
+
+  /** Consumes stderr lines (left) and stdout lines (right) in a background thread. */
+  def consumeOutputs(
+      m: Either[String, String] => F[Unit]
+  )(implicit F: Async[F]): Resource[F, SysExec[F]] = {
+    val pe: Pipe[F, String, Unit] = _.map(_.asLeft).evalMap(m)
+    val po: Pipe[F, String, Unit] = _.map(_.asRight).evalMap(m)
+    consumeOutputs(po, pe)
+  }
+
+  def logOutputs(logger: Logger[F], name: String)(implicit F: Async[F]) =
+    consumeOutputs {
+      case Right(line) => logger.debug(s"[$name (out)]: $line")
+      case Left(line)  => logger.debug(s"[$name (err)]: $line")
+    }
+}
+
+object SysExec {
+  private val readChunkSz = 8 * 1024
+
+  def apply[F[_]: Sync](
+      cmd: SysCmd,
+      logger: Logger[F],
+      workdir: Option[Path] = None,
+      stdin: Option[Stream[F, Byte]] = None
+  ): Resource[F, SysExec[F]] =
+    for {
+      proc <- startProcess(logger, cmd, workdir, stdin)
+      fibers <- Resource.eval(Ref.of[F, List[F[Unit]]](Nil))
+    } yield new SysExec[F] {
+      def stdout: Stream[F, Byte] =
+        fs2.io.readInputStream(
+          Sync[F].blocking(proc.getInputStream),
+          readChunkSz,
+          closeAfterUse = false
+        )
+
+      def stderr: Stream[F, Byte] =
+        fs2.io.readInputStream(
+          Sync[F].blocking(proc.getErrorStream),
+          readChunkSz,
+          closeAfterUse = false
+        )
+
+      def cancel = Sync[F].blocking(proc.destroy())
+
+      def waitFor(timeout: Option[Duration]): F[Int] = {
+        val to = timeout.getOrElse(cmd.timeout)
+        logger.trace("Waiting for command to terminate…") *>
+          Sync[F]
+            .blocking(proc.waitFor(to.millis, TimeUnit.MILLISECONDS))
+            .flatTap(_ => fibers.get.flatMap(_.traverse_(identity)))
+            .flatMap(terminated =>
+              if (terminated) proc.exitValue().pure[F]
+              else
+                Sync[F]
+                  .raiseError(
+                    new TimeoutException(s"Timed out after: ${to.formatExact}")
+                  )
+            )
+      }
+
+      def consumeOutputs(out: Pipe[F, String, Unit], err: Pipe[F, String, Unit])(implicit
+          F: Async[F]
+      ): Resource[F, SysExec[F]] =
+        for {
+          f1 <- F.background(stdoutLines.through(out).compile.drain)
+          f2 <- F.background(stderrLines.through(err).compile.drain)
+          _ <- Resource.eval(fibers.update(list => f1.void :: f2.void :: list))
+        } yield this
+    }
+
+  private def startProcess[F[_]: Sync, A](
+      logger: Logger[F],
+      cmd: SysCmd,
+      workdir: Option[Path],
+      stdin: Option[Stream[F, Byte]]
+  ): Resource[F, Process] = {
+    val log = logger.debug(s"Running external command: ${cmd.cmdString}")
+
+    val proc = log *>
+      Sync[F].blocking {
+        val pb = new ProcessBuilder(cmd.toCmd.asJava)
+          .redirectInput(if (stdin.isDefined) Redirect.PIPE else Redirect.INHERIT)
+          .redirectError(Redirect.PIPE)
+          .redirectOutput(Redirect.PIPE)
+
+        val pbEnv = pb.environment()
+        cmd.env.foreach { (name, v) =>
+          pbEnv.put(name, v)
+          ()
+        }
+        workdir.map(_.toNioPath.toFile).foreach(pb.directory)
+        pb.start()
+      }
+
+    Resource
+      .make(proc)(p =>
+        logger.debug(s"Closing process: `${cmd.cmdString}`").map(_ => p.destroy())
+      )
+      .evalMap(p =>
+        stdin match {
+          case Some(in) =>
+            writeToProcess(in, p).compile.drain.as(p)
+          case None =>
+            p.pure[F]
+        }
+      )
+  }
+
+  private def writeToProcess[F[_]: Sync](
+      data: Stream[F, Byte],
+      proc: Process
+  ): Stream[F, Nothing] =
+    data.through(fs2.io.writeOutputStream(Sync[F].blocking(proc.getOutputStream)))
+}
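A sketch of running such a command; the program is illustrative. The resource shuts the process down if the scope is left early, and `logOutputs` mirrors stdout/stderr into the logger in the background:

    import cats.effect.IO
    import docspell.common.exec.{SysCmd, SysExec}
    import docspell.logging.Logger

    def runTool(logger: Logger[IO]): IO[Int] =
      SysExec(SysCmd("pdftotext", "in.pdf", "out.txt"), logger)
        .flatMap(_.logOutputs(logger, "pdftotext"))
        .use(_.waitFor())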
@@ -4,20 +4,18 @@
  * SPDX-License-Identifier: AGPL-3.0-or-later
  */
 
-package docspell.common
+package docspell.common.util
 
 import java.nio.file.{Path => JPath}
 
-import cats.FlatMap
-import cats.Monad
 import cats.effect._
-import cats.implicits._
+import cats.syntax.all._
+import cats.{FlatMap, Monad}
 import fs2.Stream
 import fs2.io.file.{Files, Flags, Path}
 
-import docspell.common.syntax.all._
-
 import io.circe.Decoder
+import io.circe.parser
 
 object File {
 
@@ -75,6 +73,5 @@ object File {
       .map(_ => file)
 
   def readJson[F[_]: Async, A](file: Path)(implicit d: Decoder[A]): F[A] =
-    readText[F](file).map(_.parseJsonAs[A]).rethrow
-
+    readText[F](file).map(parser.decode[A]).rethrow
 }
@@ -0,0 +1,27 @@
+/*
+ * Copyright 2020 Eike K. & Contributors
+ *
+ * SPDX-License-Identifier: AGPL-3.0-or-later
+ */
+
+package docspell.common.util
+
+import cats.effect._
+
+import scodec.bits.ByteVector
+
+trait Random[F[_]] {
+  def string(len: Int): F[String]
+  def string: F[String] = string(8)
+}
+
+object Random {
+  def apply[F[_]: Sync] =
+    new Random[F] {
+      def string(len: Int) = Sync[F].delay {
+        val buf = Array.ofDim[Byte](len)
+        new scala.util.Random().nextBytes(buf)
+        ByteVector.view(buf).toBase58
+      }
+    }
+}
@@ -70,11 +70,13 @@ class GlobTest extends FunSuite {
 
   test("with splitting") {
     assert(Glob("a/b/*").matches(true)("a/b/hello"))
+    assert(!Glob("a/b/*").matches(true)("a/b/hello/bello"))
     assert(!Glob("a/b/*").matches(true)("/a/b/hello"))
     assert(Glob("/a/b/*").matches(true)("/a/b/hello"))
    assert(!Glob("/a/b/*").matches(true)("a/b/hello"))
     assert(!Glob("*/a/b/*").matches(true)("a/b/hello"))
     assert(Glob("*/a/b/*").matches(true)("test/a/b/hello"))
+    assert(!Glob("/a/b").matches(true)("/a/b/c/d"))
   }
 
   test("asString") {
@@ -0,0 +1,60 @@
+/*
+ * Copyright 2020 Eike K. & Contributors
+ *
+ * SPDX-License-Identifier: AGPL-3.0-or-later
+ */
+
+package docspell.common
+
+import munit._
+
+class UrlMatcherTest extends FunSuite {
+
+  test("it should match patterns") {
+    assertUrlsMatch(
+      uri("https://github.com/docspell/*") -> uri("https://github.com/docspell/dsc"),
+      uri("*s://test.com/*") -> uri("https://test.com/a"),
+      uri("*s://test.com/*") -> uri("https://test.com/a/b"),
+      uri("*s://test.com/*") -> uri("https://test.com/a/b/c"),
+      uri("*s://test.com/project/*") -> uri("https://test.com/project/c"),
+      uri("https://*.test.com/projects/*") -> uri("https://a.test.com/projects/p1"),
+      uri("https://*.test.com/projects/*") -> uri("https://b.test.com/projects/p1"),
+      uri("https://*.test.com/projects/*") -> uri("https://b.test.com/projects/p1")
+    )
+
+    assertUrlsNotMatch(
+      uri("https://*.test.com/projects/*") -> uri("https://test.com/projects/p1"),
+      uri("*s://test.com/project/*") -> uri("https://test.com/subject/c")
+    )
+  }
+
+  def uri(str: String): LenientUri = LenientUri.unsafe(str)
+
+  def assertUrlsMatch(tests: List[(LenientUri, LenientUri)]): Unit =
+    tests.foreach { case (patternUri, checkUri) =>
+      assert(
+        UrlMatcher.fromUrl(patternUri).matches(checkUri),
+        s"$patternUri does not match $checkUri"
+      )
+    }
+
+  def assertUrlsMatch(
+      test: (LenientUri, LenientUri),
+      more: (LenientUri, LenientUri)*
+  ): Unit =
+    assertUrlsMatch(test :: more.toList)
+
+  def assertUrlsNotMatch(tests: List[(LenientUri, LenientUri)]): Unit =
+    tests.foreach { case (patternUri, checkUri) =>
+      assert(
+        !UrlMatcher.fromUrl(patternUri).matches(checkUri),
+        s"$patternUri incorrectly matches $checkUri"
+      )
+    }
+
+  def assertUrlsNotMatch(
+      test: (LenientUri, LenientUri),
+      more: (LenientUri, LenientUri)*
+  ): Unit =
+    assertUrlsNotMatch(test :: more.toList)
+}
@@ -0,0 +1,85 @@
+/*
+ * Copyright 2020 Eike K. & Contributors
+ *
+ * SPDX-License-Identifier: AGPL-3.0-or-later
+ */
+
+package docspell.common.bc
+
+import docspell.common._
+
+import io.circe.parser
+import io.circe.syntax._
+import munit._
+
+class BackendCommandTest extends FunSuite {
+
+  test("encode json") {
+    val bc: BackendCommand =
+      BackendCommand.item(
+        id("abc"),
+        List(
+          ItemAction.RemoveTagsCategory(Set("doctype")),
+          ItemAction.AddTags(Set("tag1", "tag2"))
+        )
+      )
+
+    assertEquals(
+      bc.asJson.spaces2,
+      """{
+        |  "itemId" : "abc",
+        |  "actions" : [
+        |    {
+        |      "categories" : [
+        |        "doctype"
+        |      ],
+        |      "action" : "remove-tags-category"
+        |    },
+        |    {
+        |      "tags" : [
+        |        "tag1",
+        |        "tag2"
+        |      ],
+        |      "action" : "add-tags"
+        |    }
+        |  ],
+        |  "command" : "item-update"
+        |}""".stripMargin
+    )
+  }
+
+  test("decode case insensitive keys") {
+    val json = """{
+      |  "itemId" : "abc",
+      |  "actions" : [
+      |    {
+      |      "categories" : [
+      |        "doctype"
+      |      ],
+      |      "action" : "remove-tags-category"
+      |    },
+      |    {
+      |      "tags" : [
+      |        "tag1",
+      |        "tag2"
+      |      ],
+      |      "action" : "add-tags"
+      |    }
+      |  ],
+      |  "command" : "item-update"
+      |}""".stripMargin
+
+    val bc: BackendCommand =
+      BackendCommand.item(
+        id("abc"),
+        List(
+          ItemAction.RemoveTagsCategory(Set("doctype")),
+          ItemAction.AddTags(Set("tag1", "tag2"))
+        )
+      )
+
+    assertEquals(parser.decode[BackendCommand](json), Right(bc))
+  }
+
+  def id(str: String) = Ident.unsafe(str)
+}
@@ -13,6 +13,7 @@ import scala.reflect.ClassTag
 import cats.syntax.all._
 import fs2.io.file.Path
 
+import docspell.addons.RunnerType
 import docspell.common._
 import docspell.ftspsql.{PgQueryParser, RankNormalization}
 import docspell.logging.{Level, LogConfig}
@@ -32,6 +33,17 @@ object Implicits {
     else super.fieldValue(name)
   }
 
+  implicit val urlMatcherReader: ConfigReader[UrlMatcher] = {
+    val fromList = ConfigReader[List[String]].emap(reason(UrlMatcher.fromStringList))
+    val fromString = ConfigReader[String].emap(
+      reason(str => UrlMatcher.fromStringList(str.split("[\\s,]+").toList))
+    )
+    fromList.orElse(fromString)
+  }
+
+  implicit val runnerSelectReader: ConfigReader[List[RunnerType]] =
+    ConfigReader[String].emap(reason(RunnerType.fromSeparatedString))
+
   implicit val accountIdReader: ConfigReader[AccountId] =
     ConfigReader[String].emap(reason(AccountId.parse))
 
@@ -12,6 +12,7 @@ import fs2.io.file.{Files, Path}
 import fs2.{Pipe, Stream}
 
 import docspell.common._
+import docspell.common.util.File
 import docspell.convert.ConversionResult
 import docspell.convert.ConversionResult.{Handler, successPdf, successPdfTxt}
 import docspell.logging.Logger
@@ -15,6 +15,7 @@ import cats.implicits._
 import fs2.Stream
 
 import docspell.common._
+import docspell.common.util.File
 import docspell.convert.ConversionResult.Handler
 import docspell.convert.extern.OcrMyPdfConfig
 import docspell.convert.extern.{TesseractConfig, UnoconvConfig, WkHtmlPdfConfig}
@@ -18,6 +18,7 @@ import fs2.io.file.Path
 import fs2.{Pipe, Stream}
 
 import docspell.common._
+import docspell.common.util.File
 import docspell.convert.ConversionResult.Handler
 import docspell.files.TikaMimetype
 
@@ -14,6 +14,7 @@ import cats.effect.unsafe.implicits.global
 import fs2.io.file.Path
 
 import docspell.common._
+import docspell.common.util.File
 import docspell.convert._
 import docspell.files.ExampleFiles
 import docspell.logging.TestLoggingConfig
@@ -11,6 +11,7 @@ import fs2.Stream
 import fs2.io.file.Path
 
 import docspell.common._
+import docspell.common.util.File
 import docspell.logging.Logger
 
 object Ocr {
@@ -11,6 +11,7 @@ import java.nio.file.Paths
 import fs2.io.file.Path
 
 import docspell.common._
+import docspell.common.util.File
 
 case class OcrConfig(
   maxImageSize: Int,
@@ -0,0 +1,61 @@
+/*
+ * Copyright 2020 Eike K. & Contributors
+ *
+ * SPDX-License-Identifier: AGPL-3.0-or-later
+ */
+
+package docspell.files
+
+import cats.data.OptionT
+import cats.effect.Sync
+import cats.syntax.all._
+import fs2.Stream
+import fs2.io.file.{Files, Path}
+
+import docspell.common.{MimeType, MimeTypeHint}
+
+import io.circe.Encoder
+import io.circe.syntax._
+
+trait FileSupport {
+  implicit final class FileOps[F[_]: Files: Sync](self: Path) {
+    def detectMime: F[Option[MimeType]] =
+      Files[F].isReadable(self).flatMap { flag =>
+        OptionT
+          .whenF(flag) {
+            TikaMimetype
+              .detect(
+                Files[F].readAll(self),
+                MimeTypeHint.filename(self.fileName.toString)
+              )
+          }
+          .value
+      }
+
+    def asTextFile(alt: MimeType => F[Unit]): F[Option[Path]] =
+      OptionT(detectMime).flatMapF { mime =>
+        if (mime.matches(MimeType.text("plain"))) self.some.pure[F]
+        else alt(mime).as(None: Option[Path])
+      }.value
+
+    def readText: F[String] =
+      Files[F]
+        .readAll(self)
+        .through(fs2.text.utf8.decode)
+        .compile
+        .string
+
+    def readAll: Stream[F, Byte] =
+      Files[F].readAll(self)
+
+    def writeJson[A: Encoder](value: A): F[Unit] =
+      Stream
+        .emit(value.asJson.noSpaces)
+        .through(fs2.text.utf8.encode)
+        .through(Files[F].writeAll(self))
+        .compile
+        .drain
+  }
+}
+
+object FileSupport extends FileSupport
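A sketch of the enriched `Path` operations; the file names are invented:

    import cats.effect.IO
    import fs2.io.file.Path
    import docspell.common.MimeType
    import docspell.files.FileSupport._

    // Detect the mime type of a file and write a small JSON document.
    val mime: IO[Option[MimeType]] = Path("upload/scan.pdf").detectMime
    val write: IO[Unit] = Path("out/result.json").writeJson(Map("state" -> "done"))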
@ -8,11 +8,12 @@ package docspell.files
|
|||||||
|
|
||||||
import java.io.InputStream
|
import java.io.InputStream
|
||||||
import java.nio.charset.StandardCharsets
|
import java.nio.charset.StandardCharsets
|
||||||
import java.nio.file.Paths
|
|
||||||
import java.util.zip.{ZipEntry, ZipInputStream, ZipOutputStream}
|
import java.util.zip.{ZipEntry, ZipInputStream, ZipOutputStream}
|
||||||
|
|
||||||
|
import cats.data.OptionT
|
||||||
import cats.effect._
|
import cats.effect._
|
||||||
import cats.implicits._
|
import cats.implicits._
|
||||||
|
import fs2.io.file.{Files, Path}
|
||||||
import fs2.{Pipe, Stream}
|
import fs2.{Pipe, Stream}
|
||||||
|
|
||||||
import docspell.common.Binary
|
import docspell.common.Binary
|
||||||
@ -27,16 +28,72 @@ object Zip {
   ): Pipe[F, (String, Stream[F, Byte]), Byte] =
     in => zipJava(logger, chunkSize, in.through(deduplicate))
 
-  def unzipP[F[_]: Async](chunkSize: Int, glob: Glob): Pipe[F, Byte, Binary[F]] =
-    s => unzip[F](chunkSize, glob)(s)
+  def unzip[F[_]: Async](
+      chunkSize: Int,
+      glob: Glob
+  ): Pipe[F, Byte, Binary[F]] =
+    s => unzipStream[F](chunkSize, glob)(s)
 
-  def unzip[F[_]: Async](chunkSize: Int, glob: Glob)(
+  def unzipStream[F[_]: Async](chunkSize: Int, glob: Glob)(
       data: Stream[F, Byte]
   ): Stream[F, Binary[F]] =
     data
       .through(fs2.io.toInputStream[F])
       .flatMap(in => unzipJava(in, chunkSize, glob))
 
+  def saveTo[F[_]: Async](
+      logger: Logger[F],
+      targetDir: Path,
+      moveUp: Boolean
+  ): Pipe[F, Binary[F], Path] =
+    binaries =>
+      binaries
+        .filter(e => !e.name.endsWith("/"))
+        .evalMap { entry =>
+          val out = targetDir / entry.name
+          val createParent =
+            OptionT
+              .fromOption[F](out.parent)
+              .flatMapF(parent =>
+                Files[F]
+                  .exists(parent)
+                  .map(flag => Option.when(!flag)(parent))
+              )
+              .semiflatMap(p => Files[F].createDirectories(p))
+              .getOrElse(())
+
+          logger.trace(s"Unzip ${entry.name} -> $out") *>
+            createParent *>
+            entry.data.through(Files[F].writeAll(out)).compile.drain
+        }
+        .drain ++ Stream
+        .eval(if (moveUp) moveContentsUp(logger)(targetDir) else ().pure[F])
+        .as(targetDir)
+
+  private def moveContentsUp[F[_]: Sync: Files](logger: Logger[F])(dir: Path): F[Unit] =
+    Files[F]
+      .list(dir)
+      .take(2)
+      .compile
+      .toList
+      .flatMap {
+        case subdir :: Nil =>
+          Files[F].isDirectory(subdir).flatMap {
+            case false => ().pure[F]
+            case true =>
+              Files[F]
+                .list(subdir)
+                .filter(p => p != dir)
+                .evalTap(c => logger.trace(s"Move $c -> ${dir / c.fileName}"))
+                .evalMap(child => Files[F].move(child, dir / child.fileName))
+                .compile
+                .drain
+          }
+
+        case _ =>
+          ().pure[F]
+      }
+
   def unzipJava[F[_]: Async](
       in: InputStream,
       chunkSize: Int,
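A minimal sketch (not part of this diff) of how the renamed pipeline composes: unzip a byte stream into Binary entries and persist them with the new saveTo, which emits the target directory. The value names are assumptions:

import cats.effect.IO
import fs2.Stream
import fs2.io.file.Path
import docspell.common.Glob
import docspell.files.Zip
import docspell.logging.Logger

// Unpack an archive into targetDir; moveUp = true flattens a single
// top-level directory inside the archive into targetDir itself.
def extractAll(logger: Logger[IO], zipData: Stream[IO, Byte], targetDir: Path): IO[Path] =
  zipData
    .through(Zip.unzip[IO](chunkSize = 8192, glob = Glob.all))
    .through(Zip.saveTo[IO](logger, targetDir, moveUp = true))
    .compile
    .lastOrError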
@ -55,7 +112,7 @@ object Zip {
       .unNoneTerminate
       .filter(ze => glob.matchFilenameOrPath(ze.getName()))
       .map { ze =>
-        val name = Paths.get(ze.getName()).getFileName.toString
+        val name = ze.getName()
         val data =
           fs2.io.readInputStream[F]((zin: InputStream).pure[F], chunkSize, false)
         Binary(name, data)
BIN modules/files/src/test/resources/zip-dirs-one.zip (new file; binary file not shown)
BIN modules/files/src/test/resources/zip-dirs.zip (new file; binary file not shown)
@ -7,20 +7,25 @@
 package docspell.files
 
 import cats.effect._
-import cats.effect.unsafe.implicits.global
 import cats.implicits._
+import fs2.io.file.{Files, Path}
 
 import docspell.common.Glob
+import docspell.logging.TestLoggingConfig
 
 import munit._
 
-class ZipTest extends FunSuite {
+class ZipTest extends CatsEffectSuite with TestLoggingConfig {
+  val logger = docspell.logging.getLogger[IO]
+  val tempDir = ResourceFixture(
+    Files[IO].tempDirectory(Path("target").some, "zip-test-", None)
+  )
 
   test("unzip") {
     val zipFile = ExampleFiles.letters_zip.readURL[IO](8192)
-    val uncomp = zipFile.through(Zip.unzip(8192, Glob.all))
+    val unzip = zipFile.through(Zip.unzip(8192, Glob.all))
 
-    uncomp
+    unzip
       .evalMap { entry =>
         val x = entry.data.map(_ => 1).foldMonoid.compile.lastOrError
         x.map { size =>
@ -35,6 +40,10 @@ class ZipTest extends FunSuite {
       }
       .compile
       .drain
-      .unsafeRunSync()
   }
+
+  tempDir.test("unzipTo directory tree") { _ =>
+    // val zipFile = ExampleFiles.zip_dirs_zip.readURL[IO](8192)
+    // zipFile.through(Zip.unzip(G))
+  }
 }
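A sketch (an assumption, not in this commit) of what the still-commented "unzipTo directory tree" test could do once filled in, reusing the tempDir fixture and the new Zip.saveTo; the assertion is illustrative only:

  tempDir.test("unzipTo directory tree") { dir =>
    val zipFile = ExampleFiles.zip_dirs_zip.readURL[IO](8192)
    zipFile
      .through(Zip.unzip[IO](8192, Glob.all))
      .through(Zip.saveTo[IO](logger, dir, moveUp = true))
      .compile
      .lastOrError
      // check that something was actually extracted into the target dir
      .flatMap(target => Files[IO].list(target).compile.toList)
      .map(entries => assert(entries.nonEmpty))
  }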
@ -780,4 +780,75 @@ Docpell Update Check
     index-all-chunk = 10
     }
   }
+
+  addons {
+    # A directory to extract addons when running them. Everything in
+    # here will be cleared after each run.
+    working-dir = ${java.io.tmpdir}"/docspell-addons"
+
+    # A directory for addons to store data between runs. This is not
+    # cleared by Docspell and can get large depending on the addons
+    # executed.
+    #
+    # This directory is used as base. In it subdirectories are created
+    # per run configuration id.
+    cache-dir = ${java.io.tmpdir}"/docspell-addon-cache"
+
+    executor-config {
+      # Define a (comma or whitespace separated) list of runners that
+      # are responsible for executing an addon. This setting is
+      # compared to what is supported by addons. Possible values are:
+      #
+      # - nix-flake: use nix-flake runner if the addon supports it
+      #   (this requires the nix package manager on the joex machine)
+      # - docker: use docker
+      # - trivial: use the trivial runner
+      #
+      # The first successful execution is used. This should list all
+      # runners the computer supports.
+      runner = "nix-flake, docker, trivial"
+
+      # systemd-nspawn can be used to run the program in a container.
+      # This is used by runners nix-flake and trivial.
+      nspawn = {
+        # If this is false, systemd-nspawn is not tried. When true, the
+        # addon is executed inside a lightweight container via
+        # systemd-nspawn.
+        enabled = false
+
+        # Path to sudo command. By default systemd-nspawn is executed
+        # via sudo - the user running joex must be allowed to do so NON
+        # INTERACTIVELY. If this is empty, then nspawn is tried to
+        # execute without sudo.
+        sudo-binary = "sudo"
+
+        # Path to the systemd-nspawn command.
+        nspawn-binary = "systemd-nspawn"
+
+        # Workaround, if multiple same named containers are run too fast
+        container-wait = "100 millis"
+      }
+
+      # The timeout for running an addon.
+      run-timeout = "15 minutes"
+
+      # Configure the nix flake runner.
+      nix-runner {
+        # Path to the nix command.
+        nix-binary = "nix"
+
+        # The timeout for building the package (running nix build).
+        build-timeout = "15 minutes"
+      }
+
+      # Configure the docker runner
+      docker-runner {
+        # Path to the docker command.
+        docker-binary = "docker"
+
+        # The timeout for building the package (running docker build).
+        build-timeout = "15 minutes"
+      }
+    }
+  }
 }
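The runner setting above is a comma or whitespace separated list tried in order. A hypothetical sketch of parsing such a value into runner types; the names below are assumptions for illustration, not the actual docspell.addons.RunnerType API:

import cats.syntax.all._

sealed trait RunnerType
object RunnerType {
  case object NixFlake extends RunnerType
  case object Docker extends RunnerType
  case object Trivial extends RunnerType

  // Parse one runner name, as it appears in the config.
  def fromString(s: String): Either[String, RunnerType] =
    s.trim.toLowerCase match {
      case "nix-flake" => Right(NixFlake)
      case "docker"    => Right(Docker)
      case "trivial"   => Right(Trivial)
      case other       => Left(s"Invalid runner value: $other")
    }

  // "nix-flake, docker, trivial" -> Right(List(NixFlake, Docker, Trivial))
  def fromSeparatedString(s: String): Either[String, List[RunnerType]] =
    s.split("[\\s,]+").toList.filter(_.nonEmpty).traverse(fromString)
}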
@ -12,6 +12,7 @@ import fs2.io.file.Path
 import docspell.analysis.TextAnalysisConfig
 import docspell.analysis.classifier.TextClassifierConfig
 import docspell.backend.Config.Files
+import docspell.backend.joex.AddonEnvConfig
 import docspell.common._
 import docspell.config.{FtsType, PgFtsConfig}
 import docspell.convert.ConvertConfig
@ -43,7 +44,8 @@ case class Config(
     files: Files,
     mailDebug: Boolean,
     fullTextSearch: Config.FullTextSearch,
-    updateCheck: UpdateCheckConfig
+    updateCheck: UpdateCheckConfig,
+    addons: AddonEnvConfig
 ) {
 
   def pubSubConfig(headerValue: Ident): PubSubConfig =
@ -145,6 +145,8 @@ object JoexAppImpl extends MailAddressCodec {
         schedulerModule.scheduler,
         schedulerModule.periodicScheduler
       )
+      nodes <- ONode(store)
+      _ <- nodes.withRegistered(cfg.appId, NodeType.Joex, cfg.baseUrl, None)
       appR <- Resource.make(app.init.map(_ => app))(_.initShutdown)
     } yield appR
 
@ -59,7 +59,7 @@ object JoexServer {
           Router("pubsub" -> pubSub.receiveRoute)
         },
         "/api/info" -> InfoRoutes(cfg),
-        "/api/v1" -> JoexRoutes(joexApp)
+        "/api/v1" -> JoexRoutes(cfg, joexApp)
       ).orNotFound
 
       // With Middlewares in place
Some files were not shown because too many files have changed in this diff.