Mirror of https://github.com/TheAnachronism/docspell.git (synced 2025-06-22 02:18:26 +00:00)
Experiment with addons
Addons allow executing external programs in some context inside Docspell. Currently it is possible to run them after processing files. Addons are provided as URLs to zip files.
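
For illustration only (not part of this commit): a minimal sketch of how an addon zip could be fetched from its URL with the UrlReader introduced further down in this diff. The URL and target file name are made up.

import cats.effect.{IO, IOApp}
import fs2.io.file.{Files, Path}
import docspell.common.{LenientUri, UrlReader}

object FetchAddonSketch extends IOApp.Simple {
  // hypothetical addon location; addons are referenced by URLs to zip files
  val addonUrl = LenientUri.unsafe("https://example.com/my-addon.zip")

  def run: IO[Unit] =
    UrlReader[IO]                     // uses the default reader (LenientUri.readURL)
      .apply(addonUrl)                // Stream[IO, Byte] of the zip content
      .through(Files[IO].writeAll(Path("my-addon.zip")))
      .compile
      .drain
}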
@@ -9,8 +9,9 @@ package docspell.common
import java.time.Instant

import io.circe._
import scodec.bits.ByteVector

object BaseJsonCodecs {
trait BaseJsonCodecs {

  implicit val encodeInstantEpoch: Encoder[Instant] =
    Encoder.encodeJavaLong.contramap(_.toEpochMilli)

@@ -18,4 +19,11 @@ object BaseJsonCodecs {
  implicit val decodeInstantEpoch: Decoder[Instant] =
    Decoder.decodeLong.map(Instant.ofEpochMilli)

  implicit val byteVectorEncoder: Encoder[ByteVector] =
    Encoder.encodeString.contramap(_.toBase64)

  implicit val byteVectorDecoder: Decoder[ByteVector] =
    Decoder.decodeString.emap(ByteVector.fromBase64Descriptive(_))
}

object BaseJsonCodecs extends BaseJsonCodecs
@@ -18,6 +18,18 @@ final case class Binary[F[_]](name: String, mime: MimeType, data: Stream[F, Byte]

  def withMime(mime: MimeType): Binary[F] =
    copy(mime = mime)

  /** Return the extension of `name` if available (without the dot) */
  def extension: Option[String] =
    name.lastIndexOf('.') match {
      case n if n > 0 =>
        Some(name.substring(n + 1))
      case _ =>
        None
    }

  def extensionIn(extensions: Set[String]): Boolean =
    extension.exists(extensions.contains)
}

object Binary {
@@ -32,6 +32,7 @@ object FileCategory {
  case object PreviewImage extends FileCategory
  case object Classifier extends FileCategory
  case object DownloadAll extends FileCategory
  case object Addon extends FileCategory

  val all: NonEmptyList[FileCategory] =
    NonEmptyList.of(

@@ -39,7 +40,8 @@ object FileCategory {
      AttachmentConvert,
      PreviewImage,
      Classifier,
      DownloadAll
      DownloadAll,
      Addon
    )

  def fromString(str: String): Either[String, FileCategory] =
@@ -32,7 +32,8 @@ object Glob {
    def single(str: String) =
      PatternGlob(Pattern(split(str, separator).map(makeSegment)))

    if (in == "*") all
    if (in == all.asString) all
    else if (in == none.asString) none
    else
      split(in, anyChar) match {
        case NonEmptyList(_, Nil) =>

@@ -51,15 +52,25 @@ object Glob {
    val asString = "*"
  }

  val none = new Glob {
    def matches(caseSensitive: Boolean)(in: String) = false
    def matchFilenameOrPath(in: String) = false
    def asString = "!*"
  }

  def pattern(pattern: Pattern): Glob =
    PatternGlob(pattern)

  /** A simple glob supporting `*` and `?`. */
  final private case class PatternGlob(pattern: Pattern) extends Glob {
    def matches(caseSensitive: Boolean)(in: String): Boolean =
    def matches(caseSensitive: Boolean)(in: String): Boolean = {
      val input = Glob.split(in, Glob.separator)

      pattern.parts.size == input.size &&
      pattern.parts
        .zipWith(Glob.split(in, Glob.separator))(_.matches(caseSensitive)(_))
        .zipWith(input)(_.matches(caseSensitive)(_))
        .forall(identity)
    }

    def matchFilenameOrPath(in: String): Boolean =
      if (pattern.parts.tail.isEmpty) matches(true)(split(in, separator).last)

@@ -67,6 +78,8 @@ object Glob {

    def asString: String =
      pattern.asString

    override def toString = s"PatternGlob($asString)"
  }

  final private case class AnyGlob(globs: NonEmptyList[Glob]) extends Glob {

@@ -76,6 +89,8 @@ object Glob {
      globs.exists(_.matchFilenameOrPath(in))
    def asString =
      globs.toList.map(_.asString).mkString(anyChar.toString)

    override def toString = s"AnyGlob($globs)"
  }

  case class Pattern(parts: NonEmptyList[Segment]) {
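
A small illustration (not part of the diff) of the glob semantics above; the multi-segment cases mirror assertions in GlobTest further below, the single-segment pattern is a made-up example.

import docspell.common.Glob

object GlobSketch {
  // single-segment pattern: '*' and '?' match within one path segment
  val pdfOnly = Glob("*.pdf")
  val a = pdfOnly.matches(caseSensitive = true)("scan.pdf") // expected: true
  val b = pdfOnly.matches(caseSensitive = true)("scan.txt") // expected: false

  // multi-segment patterns are split on '/' and matched segment by segment
  val c = Glob("a/b/*").matches(true)("a/b/hello")       // true
  val d = Glob("a/b/*").matches(true)("a/b/hello/bello") // false (segment counts differ)
}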
@@ -26,6 +26,9 @@ case class Ident(id: String) {

  def /(next: Ident): Ident =
    new Ident(id + Ident.concatChar + next.id)

  def take(n: Int): Ident =
    new Ident(id.take(n))
}

object Ident {
@@ -0,0 +1,28 @@
/*
 * Copyright 2020 Eike K. & Contributors
 *
 * SPDX-License-Identifier: AGPL-3.0-or-later
 */

package docspell.common

import io.circe.generic.semiauto.{deriveDecoder, deriveEncoder}
import io.circe.{Decoder, Encoder}

/** Arguments to submit a task that runs addons configured for some existing item.
  *
  * If `addonTaskIds` is non empty, only these addon tasks are run. Otherwise all addon
  * tasks that are configured for 'existing-item' are run.
  */
final case class ItemAddonTaskArgs(
    collective: Ident,
    itemId: Ident,
    addonRunConfigs: Set[Ident]
)

object ItemAddonTaskArgs {
  val taskName: Ident = Ident.unsafe("addon-existing-item")

  implicit val jsonDecoder: Decoder[ItemAddonTaskArgs] = deriveDecoder
  implicit val jsonEncoder: Encoder[ItemAddonTaskArgs] = deriveEncoder
}
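
For reference, a sketch (with hypothetical identifiers) of how these task arguments serialize with the derived codecs above:

import docspell.common.{Ident, ItemAddonTaskArgs}
import io.circe.syntax._

object ItemAddonTaskArgsSketch {
  val args = ItemAddonTaskArgs(
    collective = Ident.unsafe("family"),
    itemId = Ident.unsafe("item-123"),
    addonRunConfigs = Set(Ident.unsafe("run-config-1"))
  )

  // yields JSON roughly like:
  // {"collective":"family","itemId":"item-123","addonRunConfigs":["run-config-1"]}
  val json: String = args.asJson.noSpaces
}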
@@ -6,6 +6,8 @@

package docspell.common

import fs2.io.file.Path

case class MimeTypeHint(filename: Option[String], advertised: Option[String]) {

  def withName(name: String): MimeTypeHint =

@@ -21,6 +23,9 @@ object MimeTypeHint {
  def filename(name: String): MimeTypeHint =
    MimeTypeHint(Some(name), None)

  def filename(file: Path): MimeTypeHint =
    filename(file.fileName.toString)

  def advertised(mimeType: MimeType): MimeTypeHint =
    advertised(mimeType.asString)
@@ -17,7 +17,7 @@ import io.circe.generic.semiauto._
  * This task is run for each new file to create a new item from it or to add this file as
  * an attachment to an existing item.
  *
  * If the `itemId' is set to some value, the item is tried to load to ammend with the
  * If the `itemId' is set to some value, the item is tried to load to amend with the
  * given files. Otherwise a new item is created.
  *
  * It is also re-used by the 'ReProcessItem' task.
@@ -0,0 +1,19 @@
/*
 * Copyright 2020 Eike K. & Contributors
 *
 * SPDX-License-Identifier: AGPL-3.0-or-later
 */

package docspell.common

import io.circe.generic.semiauto.{deriveDecoder, deriveEncoder}
import io.circe.{Decoder, Encoder}

final case class ScheduledAddonTaskArgs(collective: Ident, addonTaskId: Ident)

object ScheduledAddonTaskArgs {
  val taskName: Ident = Ident.unsafe("addon-scheduled-task")

  implicit val jsonDecoder: Decoder[ScheduledAddonTaskArgs] = deriveDecoder
  implicit val jsonEncoder: Encoder[ScheduledAddonTaskArgs] = deriveEncoder
}
@@ -17,11 +17,23 @@ import cats.implicits._
import fs2.io.file.Path
import fs2.{Stream, io, text}

import docspell.common.{exec => newExec}
import docspell.logging.Logger

// better use `SysCmd` and `SysExec`
object SystemCommand {

  final case class Config(program: String, args: Seq[String], timeout: Duration) {
  final case class Config(
      program: String,
      args: Seq[String],
      timeout: Duration,
      env: Map[String, String] = Map.empty
  ) {

    def toSysCmd = newExec
      .SysCmd(program, newExec.Args(args))
      .withTimeout(timeout)
      .addEnv(newExec.Env(env))

    def mapArgs(f: String => String): Config =
      Config(program, args.map(f), timeout)

@@ -33,6 +45,18 @@ object SystemCommand {
        }
      )

    def withEnv(key: String, value: String): Config =
      copy(env = env.updated(key, value))

    def addEnv(moreEnv: Map[String, String]): Config =
      copy(env = env ++ moreEnv)

    def appendArgs(extraArgs: Args): Config =
      copy(args = args ++ extraArgs.args)

    def appendArgs(extraArgs: Seq[String]): Config =
      copy(args = args ++ extraArgs)

    def toCmd: List[String] =
      program :: args.toList

@@ -40,6 +64,45 @@ object SystemCommand {
      toCmd.mkString(" ")
  }

  final case class Args(args: Vector[String]) extends Iterable[String] {
    override def iterator = args.iterator

    def prepend(a: String): Args = Args(a +: args)

    def prependWhen(flag: Boolean)(a: String): Args =
      prependOption(Option.when(flag)(a))

    def prependOption(value: Option[String]): Args =
      value.map(prepend).getOrElse(this)

    def append(a: String, as: String*): Args =
      Args(args ++ (a +: as.toVector))

    def appendOption(value: Option[String]): Args =
      value.map(append(_)).getOrElse(this)

    def appendOptionVal(first: String, second: Option[String]): Args =
      second.map(b => append(first, b)).getOrElse(this)

    def appendWhen(flag: Boolean)(a: String, as: String*): Args =
      if (flag) append(a, as: _*) else this

    def appendWhenNot(flag: Boolean)(a: String, as: String*): Args =
      if (!flag) append(a, as: _*) else this

    def append(p: Path): Args =
      append(p.toString)

    def append(as: Iterable[String]): Args =
      Args(args ++ as.toVector)
  }
  object Args {
    val empty: Args = Args()

    def apply(as: String*): Args =
      Args(as.toVector)
  }

  final case class Result(rc: Int, stdout: String, stderr: String)

  def exec[F[_]: Sync](

@@ -104,6 +167,10 @@ object SystemCommand {
        .redirectError(Redirect.PIPE)
        .redirectOutput(Redirect.PIPE)

      val pbEnv = pb.environment()
      cmd.env.foreach { case (key, value) =>
        pbEnv.put(key, value)
      }
      wd.map(_.toNioPath.toFile).foreach(pb.directory)
      pb.start()
    }
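
A short sketch (not from the diff; program, arguments and environment variable are made up) of the new bridge from the legacy SystemCommand.Config to the exec.SysCmd API:

import docspell.common.{Duration, SystemCommand}
import docspell.common.exec.SysCmd

object ConfigToSysCmdSketch {
  val legacy: SystemCommand.Config =
    SystemCommand
      .Config("ocrmypdf", Seq("--skip-text", "in.pdf", "out.pdf"), Duration.minutes(2))
      .withEnv("OMP_THREAD_LIMIT", "1")

  // program, args, timeout and env carry over to the new API
  val cmd: SysCmd = legacy.toSysCmd
}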
@@ -0,0 +1,94 @@
/*
 * Copyright 2020 Eike K. & Contributors
 *
 * SPDX-License-Identifier: AGPL-3.0-or-later
 */

package docspell.common

import cats.data.NonEmptyList
import cats.kernel.Monoid
import cats.syntax.all._

trait UrlMatcher {
  def matches(url: LenientUri): Boolean
}

object UrlMatcher {
  val True = instance(_ => true)
  val False = instance(_ => false)

  def instance(f: LenientUri => Boolean): UrlMatcher =
    (url: LenientUri) => f(url)

  def fromString(str: String): Either[String, UrlMatcher] =
    if (str == "") False.asRight
    else if (str == "*") True.asRight
    else LenientUri.parse(str).map(fromUrl)

  def unsafeFromString(str: String): UrlMatcher =
    fromString(str).fold(sys.error, identity)

  def fromStringList(str: List[String]): Either[String, UrlMatcher] =
    str match {
      case Nil => False.asRight
      case _   => str.map(_.trim).traverse(fromString).map(_.combineAll)
    }

  def fromUrl(url: LenientUri): UrlMatcher = {
    val schemeGlob = Glob(url.scheme.head)
    val hostGlob = HostGlob(url.host)
    val pathGlob = Glob(url.path.asString)
    new Impl(schemeGlob, hostGlob, pathGlob, url.path.segments.size)
  }

  def any(ulrm: IterableOnce[UrlMatcher]): UrlMatcher =
    anyMonoid.combineAll(ulrm)

  def all(urlm: IterableOnce[UrlMatcher]): UrlMatcher =
    allMonoid.combineAll(urlm)

  val anyMonoid: Monoid[UrlMatcher] =
    Monoid.instance(False, (a, b) => instance(url => a.matches(url) || b.matches(url)))

  val allMonoid: Monoid[UrlMatcher] =
    Monoid.instance(True, (a, b) => instance(url => a.matches(url) && b.matches(url)))

  implicit val defaultMonoid: Monoid[UrlMatcher] = anyMonoid

  private class Impl(scheme: Glob, host: HostGlob, path: Glob, pathSegmentCount: Int)
      extends UrlMatcher {
    def matches(url: LenientUri) = {
      // strip path to only match prefixes
      val mPath: LenientUri.Path =
        NonEmptyList.fromList(url.path.segments.take(pathSegmentCount)) match {
          case Some(nel) => LenientUri.NonEmptyPath(nel)
          case None      => LenientUri.RootPath
        }

      url.scheme.forall(scheme.matches(false)) &&
      host.matches(url.host) &&
      path.matchFilenameOrPath(mPath.asString)
    }
  }

  private class HostGlob(glob: Option[Glob]) {
    def matches(host: Option[String]): Boolean =
      (glob, host) match {
        case (Some(pattern), Some(word)) =>
          pattern.matches(false)(HostGlob.prepareHost(word))
        case (None, None) => true
        case _            => false
      }

    override def toString = s"HostGlob(${glob.map(_.asString)})"
  }

  private object HostGlob {
    def apply(hostPattern: Option[String]): HostGlob =
      new HostGlob(hostPattern.map(p => Glob(prepareHost(p))))

    private def prepareHost(host: String): String =
      host.replace('.', '/')
  }
}
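
A brief sketch (not part of the diff) of the matcher semantics; the first case mirrors an assertion in UrlMatcherTest further below, the others are made-up examples.

import docspell.common.{LenientUri, UrlMatcher}

object UrlMatcherSketch {
  val github = UrlMatcher.unsafeFromString("https://github.com/docspell/*")

  val hit  = github.matches(LenientUri.unsafe("https://github.com/docspell/dsc")) // true
  val miss = github.matches(LenientUri.unsafe("https://gitlab.com/docspell/dsc")) // false (host differs)

  // "*" matches every URL, the empty string matches none
  val anyUrl = UrlMatcher.unsafeFromString("*")
  val always = anyUrl.matches(LenientUri.unsafe("https://example.com/x")) // true
}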
@@ -0,0 +1,35 @@
/*
 * Copyright 2020 Eike K. & Contributors
 *
 * SPDX-License-Identifier: AGPL-3.0-or-later
 */

package docspell.common

import cats.ApplicativeError
import cats.effect._
import fs2.Stream

trait UrlReader[F[_]] {
  def apply(url: LenientUri): Stream[F, Byte]
}

object UrlReader {

  def instance[F[_]](f: LenientUri => Stream[F, Byte]): UrlReader[F] =
    (url: LenientUri) => f(url)

  def failWith[F[_]](
      message: String
  )(implicit F: ApplicativeError[F, Throwable]): UrlReader[F] =
    instance(url =>
      Stream.raiseError(
        new IllegalStateException(s"Unable to read '${url.asString}': $message")
      )
    )

  def apply[F[_]](implicit r: UrlReader[F]): UrlReader[F] = r

  implicit def defaultReader[F[_]: Sync]: UrlReader[F] =
    instance(_.readURL[F](8192))
}
@@ -0,0 +1,30 @@
/*
 * Copyright 2020 Eike K. & Contributors
 *
 * SPDX-License-Identifier: AGPL-3.0-or-later
 */

package docspell.common.bc

import io.circe.generic.extras.Configuration
import io.circe.generic.extras.semiauto.{deriveConfiguredDecoder, deriveConfiguredEncoder}
import io.circe.generic.semiauto.{deriveDecoder, deriveEncoder}
import io.circe.{Decoder, Encoder}

sealed trait AttachmentAction {}

object AttachmentAction {

  implicit val deriveConfig: Configuration =
    Configuration.default.withDiscriminator("action").withKebabCaseConstructorNames

  case class SetExtractedText(text: Option[String]) extends AttachmentAction
  object SetExtractedText {
    implicit val jsonDecoder: Decoder[SetExtractedText] = deriveDecoder
    implicit val jsonEncoder: Encoder[SetExtractedText] = deriveEncoder
  }

  implicit val jsonDecoder: Decoder[AttachmentAction] = deriveConfiguredDecoder
  implicit val jsonEncoder: Encoder[AttachmentAction] = deriveConfiguredEncoder

}
@@ -0,0 +1,44 @@
/*
 * Copyright 2020 Eike K. & Contributors
 *
 * SPDX-License-Identifier: AGPL-3.0-or-later
 */

package docspell.common.bc

import docspell.common.Ident

import io.circe.generic.extras.Configuration
import io.circe.generic.extras.semiauto.{deriveConfiguredDecoder, deriveConfiguredEncoder}
import io.circe.generic.semiauto.{deriveDecoder, deriveEncoder}
import io.circe.{Decoder, Encoder}

sealed trait BackendCommand {}

object BackendCommand {

  implicit val deriveConfig: Configuration =
    Configuration.default.withDiscriminator("command").withKebabCaseConstructorNames

  case class ItemUpdate(itemId: Ident, actions: List[ItemAction]) extends BackendCommand
  object ItemUpdate {
    implicit val jsonDecoder: Decoder[ItemUpdate] = deriveDecoder
    implicit val jsonEncoder: Encoder[ItemUpdate] = deriveEncoder
  }

  def item(itemId: Ident, actions: List[ItemAction]): BackendCommand =
    ItemUpdate(itemId, actions)

  case class AttachmentUpdate(
      itemId: Ident,
      attachId: Ident,
      actions: List[AttachmentAction]
  ) extends BackendCommand
  object AttachmentUpdate {
    implicit val jsonDecoder: Decoder[AttachmentUpdate] = deriveDecoder
    implicit val jsonEncoder: Encoder[AttachmentUpdate] = deriveEncoder
  }

  implicit val jsonDecoder: Decoder[BackendCommand] = deriveConfiguredDecoder
  implicit val jsonEncoder: Encoder[BackendCommand] = deriveConfiguredEncoder
}
@@ -0,0 +1,17 @@
/*
 * Copyright 2020 Eike K. & Contributors
 *
 * SPDX-License-Identifier: AGPL-3.0-or-later
 */

package docspell.common.bc

import docspell.common.Ident

trait BackendCommandRunner[F[_], A] {

  def run(collective: Ident, cmd: BackendCommand): F[A]

  def runAll(collective: Ident, cmds: List[BackendCommand]): F[A]

}
@@ -0,0 +1,102 @@
/*
 * Copyright 2020 Eike K. & Contributors
 *
 * SPDX-License-Identifier: AGPL-3.0-or-later
 */

package docspell.common.bc

import docspell.common.Ident

import io.circe.generic.extras.Configuration
import io.circe.generic.extras.semiauto.{deriveConfiguredDecoder, deriveConfiguredEncoder}
import io.circe.generic.semiauto.{deriveDecoder, deriveEncoder}
import io.circe.{Decoder, Encoder}

sealed trait ItemAction {}

object ItemAction {
  implicit val deriveConfig: Configuration =
    Configuration.default.withDiscriminator("action").withKebabCaseConstructorNames

  case class AddTags(tags: Set[String]) extends ItemAction
  object AddTags {
    implicit val jsonDecoder: Decoder[AddTags] = deriveDecoder
    implicit val jsonEncoder: Encoder[AddTags] = deriveEncoder
  }

  case class ReplaceTags(tags: Set[String]) extends ItemAction
  object ReplaceTags {
    implicit val jsonDecoder: Decoder[ReplaceTags] = deriveDecoder
    implicit val jsonEncoder: Encoder[ReplaceTags] = deriveEncoder
  }

  case class RemoveTags(tags: Set[String]) extends ItemAction
  object RemoveTags {
    implicit val jsonDecoder: Decoder[RemoveTags] = deriveDecoder
    implicit val jsonEncoder: Encoder[RemoveTags] = deriveEncoder
  }

  case class RemoveTagsCategory(categories: Set[String]) extends ItemAction
  object RemoveTagsCategory {
    implicit val jsonDecoder: Decoder[RemoveTagsCategory] = deriveDecoder
    implicit val jsonEncoder: Encoder[RemoveTagsCategory] = deriveEncoder
  }

  case class SetFolder(folder: Option[String]) extends ItemAction
  object SetFolder {
    implicit val jsonDecoder: Decoder[SetFolder] = deriveDecoder
    implicit val jsonEncoder: Encoder[SetFolder] = deriveEncoder
  }

  case class SetCorrOrg(id: Option[Ident]) extends ItemAction
  object SetCorrOrg {
    implicit val jsonDecoder: Decoder[SetCorrOrg] = deriveDecoder
    implicit val jsonEncoder: Encoder[SetCorrOrg] = deriveEncoder
  }

  case class SetCorrPerson(id: Option[Ident]) extends ItemAction
  object SetCorrPerson {
    implicit val jsonDecoder: Decoder[SetCorrPerson] = deriveDecoder
    implicit val jsonEncoder: Encoder[SetCorrPerson] = deriveEncoder
  }

  case class SetConcPerson(id: Option[Ident]) extends ItemAction
  object SetConcPerson {
    implicit val jsonDecoder: Decoder[SetConcPerson] = deriveDecoder
    implicit val jsonEncoder: Encoder[SetConcPerson] = deriveEncoder
  }

  case class SetConcEquipment(id: Option[Ident]) extends ItemAction
  object SetConcEquipment {
    implicit val jsonDecoder: Decoder[SetConcEquipment] = deriveDecoder
    implicit val jsonEncoder: Encoder[SetConcEquipment] = deriveEncoder
  }

  case class SetField(field: Ident, value: String) extends ItemAction
  object SetField {
    implicit val jsonDecoder: Decoder[SetField] = deriveDecoder
    implicit val jsonEncoder: Encoder[SetField] = deriveEncoder
  }

  case class SetName(name: String) extends ItemAction
  object SetName {
    implicit val jsonDecoder: Decoder[SetName] = deriveDecoder
    implicit val jsonEncoder: Encoder[SetName] = deriveEncoder
  }

  case class SetNotes(notes: Option[String]) extends ItemAction
  object SetNotes {
    implicit val jsonDecoder: Decoder[SetNotes] = deriveDecoder
    implicit val jsonEncoder: Encoder[SetNotes] = deriveEncoder
  }

  case class AddNotes(notes: String, separator: Option[String]) extends ItemAction
  object AddNotes {
    implicit val jsonDecoder: Decoder[AddNotes] = deriveDecoder
    implicit val jsonEncoder: Encoder[AddNotes] = deriveEncoder
  }

  implicit val jsonDecoder: Decoder[ItemAction] = deriveConfiguredDecoder
  implicit val jsonEncoder: Encoder[ItemAction] = deriveConfiguredEncoder
}
@@ -0,0 +1,49 @@
/*
 * Copyright 2020 Eike K. & Contributors
 *
 * SPDX-License-Identifier: AGPL-3.0-or-later
 */

package docspell.common.exec

import fs2.io.file.Path

case class Args(values: Seq[String]) {

  def option(key: String, value: String): Args =
    Args(values ++ Seq(key, value))

  def option(key: String, value: Option[String]): Args =
    value.map(v => option(key, v)).getOrElse(this)

  def appendOpt(v: Option[String]): Args =
    v.map(e => Args(values :+ e)).getOrElse(this)

  def append(v: String, vs: String*): Args =
    Args(values ++ (v +: vs))

  def append(path: Path): Args =
    append(path.toString)

  def append(args: Args): Args =
    Args(values ++ args.values)

  def append(args: Seq[String]): Args =
    Args(values ++ args)

  def prepend(v: String): Args =
    Args(v +: values)

  def prependWhen(flag: Boolean)(v: String) =
    if (flag) prepend(v) else this

  def cmdString: String =
    values.mkString(" ")
}

object Args {
  val empty: Args = Args(Seq.empty)

  def of(v: String*): Args =
    Args(v)
}
modules/common/src/main/scala/docspell/common/exec/Env.scala (new file, 37 lines)
@@ -0,0 +1,37 @@
/*
 * Copyright 2020 Eike K. & Contributors
 *
 * SPDX-License-Identifier: AGPL-3.0-or-later
 */

package docspell.common.exec

case class Env(values: Map[String, String]) {

  def add(name: String, value: String): Env =
    copy(values.updated(name, value))

  def addAll(v: Map[String, String]): Env =
    Env(values ++ v)

  def addAll(e: Env): Env =
    Env(values ++ e.values)

  def ++(e: Env) = addAll(e)

  def foreach(f: (String, String) => Unit): Unit =
    values.foreach(t => f(t._1, t._2))

  def map[A](f: (String, String) => A): Seq[A] =
    values.map(f.tupled).toSeq

  def mapConcat[A](f: (String, String) => Seq[A]): Seq[A] =
    values.flatMap(f.tupled).toSeq
}

object Env {
  val empty: Env = Env(Map.empty)

  def of(nv: (String, String)*): Env =
    Env(Map(nv: _*))
}
@@ -0,0 +1,43 @@
/*
 * Copyright 2020 Eike K. & Contributors
 *
 * SPDX-License-Identifier: AGPL-3.0-or-later
 */

package docspell.common.exec

import docspell.common._

final case class SysCmd(
    program: String,
    args: Args,
    env: Env,
    timeout: Duration
) {

  def withArgs(f: Args => Args): SysCmd =
    copy(args = f(args))

  def withTimeout(to: Duration): SysCmd =
    copy(timeout = to)

  def withEnv(f: Env => Env): SysCmd =
    copy(env = f(env))

  def addEnv(env: Env): SysCmd =
    withEnv(_.addAll(env))

  def cmdString: String =
    s"$program ${args.cmdString}"

  private[exec] def toCmd: Seq[String] =
    program +: args.values
}

object SysCmd {
  def apply(prg: String, args: String*): SysCmd =
    apply(prg, Args(args))

  def apply(prg: String, args: Args): SysCmd =
    SysCmd(prg, args, Env.empty, Duration.minutes(2))
}
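
To show how the pieces above fit together, a sketch (command, flags and environment variable are made up) of assembling a SysCmd from Args and Env:

import docspell.common.Duration
import docspell.common.exec.{Args, Env, SysCmd}

object SysCmdSketch {
  val cmd: SysCmd =
    SysCmd("wkhtmltopdf", Args.of("--encoding", "UTF-8").append("-"))
      .withTimeout(Duration.minutes(2))
      .withEnv(_.addAll(Env.of("LANG" -> "en_US.UTF-8")))

  // "wkhtmltopdf --encoding UTF-8 -"
  val rendered: String = cmd.cmdString
}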
modules/common/src/main/scala/docspell/common/exec/SysExec.scala (new file, 163 lines)
@@ -0,0 +1,163 @@
/*
 * Copyright 2020 Eike K. & Contributors
 *
 * SPDX-License-Identifier: AGPL-3.0-or-later
 */

package docspell.common.exec

import java.lang.ProcessBuilder.Redirect
import java.util.concurrent.TimeUnit

import scala.concurrent.TimeoutException
import scala.jdk.CollectionConverters._

import cats.effect._
import cats.syntax.all._
import fs2.io.file.Path
import fs2.{Pipe, Stream}

import docspell.common.Duration
import docspell.logging.Logger

trait SysExec[F[_]] {

  def stdout: Stream[F, Byte]

  def stdoutLines: Stream[F, String] =
    stdout
      .through(fs2.text.utf8.decode)
      .through(fs2.text.lines)

  def stderr: Stream[F, Byte]

  def stderrLines: Stream[F, String] =
    stderr
      .through(fs2.text.utf8.decode)
      .through(fs2.text.lines)

  def waitFor(timeout: Option[Duration] = None): F[Int]

  /** Sends a signal to the process to terminate it immediately */
  def cancel: F[Unit]

  /** Consume lines of output of the process in background. */
  def consumeOutputs(out: Pipe[F, String, Unit], err: Pipe[F, String, Unit])(implicit
      F: Async[F]
  ): Resource[F, SysExec[F]]

  /** Consumes stderr lines (left) and stdout lines (right) in a background thread. */
  def consumeOutputs(
      m: Either[String, String] => F[Unit]
  )(implicit F: Async[F]): Resource[F, SysExec[F]] = {
    val pe: Pipe[F, String, Unit] = _.map(_.asLeft).evalMap(m)
    val po: Pipe[F, String, Unit] = _.map(_.asRight).evalMap(m)
    consumeOutputs(po, pe)
  }

  def logOutputs(logger: Logger[F], name: String)(implicit F: Async[F]) =
    consumeOutputs {
      case Right(line) => logger.debug(s"[$name (out)]: $line")
      case Left(line)  => logger.debug(s"[$name (err)]: $line")
    }
}

object SysExec {
  private val readChunkSz = 8 * 1024

  def apply[F[_]: Sync](
      cmd: SysCmd,
      logger: Logger[F],
      workdir: Option[Path] = None,
      stdin: Option[Stream[F, Byte]] = None
  ): Resource[F, SysExec[F]] =
    for {
      proc <- startProcess(logger, cmd, workdir, stdin)
      fibers <- Resource.eval(Ref.of[F, List[F[Unit]]](Nil))
    } yield new SysExec[F] {
      def stdout: Stream[F, Byte] =
        fs2.io.readInputStream(
          Sync[F].blocking(proc.getInputStream),
          readChunkSz,
          closeAfterUse = false
        )

      def stderr: Stream[F, Byte] =
        fs2.io.readInputStream(
          Sync[F].blocking(proc.getErrorStream),
          readChunkSz,
          closeAfterUse = false
        )

      def cancel = Sync[F].blocking(proc.destroy())

      def waitFor(timeout: Option[Duration]): F[Int] = {
        val to = timeout.getOrElse(cmd.timeout)
        logger.trace("Waiting for command to terminate…") *>
          Sync[F]
            .blocking(proc.waitFor(to.millis, TimeUnit.MILLISECONDS))
            .flatTap(_ => fibers.get.flatMap(_.traverse_(identity)))
            .flatMap(terminated =>
              if (terminated) proc.exitValue().pure[F]
              else
                Sync[F]
                  .raiseError(
                    new TimeoutException(s"Timed out after: ${to.formatExact}")
                  )
            )
      }

      def consumeOutputs(out: Pipe[F, String, Unit], err: Pipe[F, String, Unit])(implicit
          F: Async[F]
      ): Resource[F, SysExec[F]] =
        for {
          f1 <- F.background(stdoutLines.through(out).compile.drain)
          f2 <- F.background(stderrLines.through(err).compile.drain)
          _ <- Resource.eval(fibers.update(list => f1.void :: f2.void :: list))
        } yield this
    }

  private def startProcess[F[_]: Sync, A](
      logger: Logger[F],
      cmd: SysCmd,
      workdir: Option[Path],
      stdin: Option[Stream[F, Byte]]
  ): Resource[F, Process] = {
    val log = logger.debug(s"Running external command: ${cmd.cmdString}")

    val proc = log *>
      Sync[F].blocking {
        val pb = new ProcessBuilder(cmd.toCmd.asJava)
          .redirectInput(if (stdin.isDefined) Redirect.PIPE else Redirect.INHERIT)
          .redirectError(Redirect.PIPE)
          .redirectOutput(Redirect.PIPE)

        val pbEnv = pb.environment()
        cmd.env.foreach { (name, v) =>
          pbEnv.put(name, v)
          ()
        }
        workdir.map(_.toNioPath.toFile).foreach(pb.directory)
        pb.start()
      }

    Resource
      .make(proc)(p =>
        logger.debug(s"Closing process: `${cmd.cmdString}`").map(_ => p.destroy())
      )
      .evalMap(p =>
        stdin match {
          case Some(in) =>
            writeToProcess(in, p).compile.drain.as(p)
          case None =>
            p.pure[F]
        }
      )
  }

  private def writeToProcess[F[_]: Sync](
      data: Stream[F, Byte],
      proc: Process
  ): Stream[F, Nothing] =
    data.through(fs2.io.writeOutputStream(Sync[F].blocking(proc.getOutputStream)))
}
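
Finally, a usage sketch (the command is made up, and a Logger[IO] is assumed to be available) for running a SysCmd through SysExec and logging its output:

import cats.effect.IO
import docspell.common.Duration
import docspell.common.exec.{SysCmd, SysExec}
import docspell.logging.Logger

object SysExecSketch {
  def pandocVersion(logger: Logger[IO]): IO[Int] =
    SysExec(SysCmd("pandoc", "--version"), logger)   // Resource over the started process
      .flatMap(_.logOutputs(logger, "pandoc"))       // consume stdout/stderr in the background
      .use(_.waitFor(Some(Duration.minutes(1))))     // exit code, or TimeoutException on timeout
}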
@@ -4,20 +4,18 @@
 * SPDX-License-Identifier: AGPL-3.0-or-later
 */

package docspell.common
package docspell.common.util

import java.nio.file.{Path => JPath}

import cats.FlatMap
import cats.Monad
import cats.effect._
import cats.implicits._
import cats.syntax.all._
import cats.{FlatMap, Monad}
import fs2.Stream
import fs2.io.file.{Files, Flags, Path}

import docspell.common.syntax.all._

import io.circe.Decoder
import io.circe.parser

object File {

@@ -75,6 +73,5 @@ object File {
      .map(_ => file)

  def readJson[F[_]: Async, A](file: Path)(implicit d: Decoder[A]): F[A] =
    readText[F](file).map(_.parseJsonAs[A]).rethrow

    readText[F](file).map(parser.decode[A]).rethrow
}
@@ -0,0 +1,27 @@
/*
 * Copyright 2020 Eike K. & Contributors
 *
 * SPDX-License-Identifier: AGPL-3.0-or-later
 */

package docspell.common.util

import cats.effect._

import scodec.bits.ByteVector

trait Random[F[_]] {
  def string(len: Int): F[String]
  def string: F[String] = string(8)
}

object Random {
  def apply[F[_]: Sync] =
    new Random[F] {
      def string(len: Int) = Sync[F].delay {
        val buf = Array.ofDim[Byte](len)
        new scala.util.Random().nextBytes(buf)
        ByteVector.view(buf).toBase58
      }
    }
}
@@ -70,11 +70,13 @@ class GlobTest extends FunSuite {

  test("with splitting") {
    assert(Glob("a/b/*").matches(true)("a/b/hello"))
    assert(!Glob("a/b/*").matches(true)("a/b/hello/bello"))
    assert(!Glob("a/b/*").matches(true)("/a/b/hello"))
    assert(Glob("/a/b/*").matches(true)("/a/b/hello"))
    assert(!Glob("/a/b/*").matches(true)("a/b/hello"))
    assert(!Glob("*/a/b/*").matches(true)("a/b/hello"))
    assert(Glob("*/a/b/*").matches(true)("test/a/b/hello"))
    assert(!Glob("/a/b").matches(true)("/a/b/c/d"))
  }

  test("asString") {
@@ -0,0 +1,60 @@
/*
 * Copyright 2020 Eike K. & Contributors
 *
 * SPDX-License-Identifier: AGPL-3.0-or-later
 */

package docspell.common

import munit._

class UrlMatcherTest extends FunSuite {

  test("it should match patterns") {
    assertUrlsMatch(
      uri("https://github.com/docspell/*") -> uri("https://github.com/docspell/dsc"),
      uri("*s://test.com/*") -> uri("https://test.com/a"),
      uri("*s://test.com/*") -> uri("https://test.com/a/b"),
      uri("*s://test.com/*") -> uri("https://test.com/a/b/c"),
      uri("*s://test.com/project/*") -> uri("https://test.com/project/c"),
      uri("https://*.test.com/projects/*") -> uri("https://a.test.com/projects/p1"),
      uri("https://*.test.com/projects/*") -> uri("https://b.test.com/projects/p1"),
      uri("https://*.test.com/projects/*") -> uri("https://b.test.com/projects/p1")
    )

    assertUrlsNotMatch(
      uri("https://*.test.com/projects/*") -> uri("https://test.com/projects/p1"),
      uri("*s://test.com/project/*") -> uri("https://test.com/subject/c")
    )
  }

  def uri(str: String): LenientUri = LenientUri.unsafe(str)

  def assertUrlsMatch(tests: List[(LenientUri, LenientUri)]): Unit =
    tests.foreach { case (patternUri, checkUri) =>
      assert(
        UrlMatcher.fromUrl(patternUri).matches(checkUri),
        s"$patternUri does not match $checkUri"
      )
    }

  def assertUrlsMatch(
      test: (LenientUri, LenientUri),
      more: (LenientUri, LenientUri)*
  ): Unit =
    assertUrlsMatch(test :: more.toList)

  def assertUrlsNotMatch(tests: List[(LenientUri, LenientUri)]): Unit =
    tests.foreach { case (patternUri, checkUri) =>
      assert(
        !UrlMatcher.fromUrl(patternUri).matches(checkUri),
        s"$patternUri incorrectly matches $checkUri"
      )
    }

  def assertUrlsNotMatch(
      test: (LenientUri, LenientUri),
      more: (LenientUri, LenientUri)*
  ): Unit =
    assertUrlsNotMatch(test :: more.toList)
}
@@ -0,0 +1,85 @@
/*
 * Copyright 2020 Eike K. & Contributors
 *
 * SPDX-License-Identifier: AGPL-3.0-or-later
 */

package docspell.common.bc

import docspell.common._

import io.circe.parser
import io.circe.syntax._
import munit._

class BackendCommandTest extends FunSuite {

  test("encode json") {
    val bc: BackendCommand =
      BackendCommand.item(
        id("abc"),
        List(
          ItemAction.RemoveTagsCategory(Set("doctype")),
          ItemAction.AddTags(Set("tag1", "tag2"))
        )
      )

    assertEquals(
      bc.asJson.spaces2,
      """{
        |  "itemId" : "abc",
        |  "actions" : [
        |    {
        |      "categories" : [
        |        "doctype"
        |      ],
        |      "action" : "remove-tags-category"
        |    },
        |    {
        |      "tags" : [
        |        "tag1",
        |        "tag2"
        |      ],
        |      "action" : "add-tags"
        |    }
        |  ],
        |  "command" : "item-update"
        |}""".stripMargin
    )
  }

  test("decode case insensitive keys") {
    val json = """{
      |  "itemId" : "abc",
      |  "actions" : [
      |    {
      |      "categories" : [
      |        "doctype"
      |      ],
      |      "action" : "remove-tags-category"
      |    },
      |    {
      |      "tags" : [
      |        "tag1",
      |        "tag2"
      |      ],
      |      "action" : "add-tags"
      |    }
      |  ],
      |  "command" : "item-update"
      |}""".stripMargin

    val bc: BackendCommand =
      BackendCommand.item(
        id("abc"),
        List(
          ItemAction.RemoveTagsCategory(Set("doctype")),
          ItemAction.AddTags(Set("tag1", "tag2"))
        )
      )

    assertEquals(parser.decode[BackendCommand](json), Right(bc))
  }

  def id(str: String) = Ident.unsafe(str)
}