Merge pull request #2348 from rehanone/update/scala-fs2-despendencies
Updated fs2 and related dependencies
commit c0f684eb0d
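Nearly every hunk below follows the same mechanical pattern: the updated fs2 no longer hands out file-system (and, for the ember-based pieces, network) operations implicitly through `Async[F]`, so effect-polymorphic code that touches files now asks for the `fs2.io.file.Files` capability as an explicit context bound (and `fs2.io.net.Network` where the http4s ember client/server is wired up). A minimal sketch of the pattern, assuming fs2 3.x semantics (hypothetical example, not code from this commit):

```scala
import cats.effect.Async
import fs2.io.file.{Files, Path}

// Before the update a plain `F[_]: Async` bound was enough, because a
// Files[F] instance could be derived implicitly from Async[F]. After the
// update the capability is requested explicitly as a context bound.
def countBytes[F[_]: Async: Files](file: Path): F[Long] =
  Files[F].readAll(file).compile.count
```

The remaining changes are incidental cleanups picked up along the way: the Scala bump to 2.13.12, explicit result types (e.g. `val runnerType: List[RunnerType]`), and named arguments for bare literals (e.g. `useStdin = true`, `chunkSize = 50`).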
@@ -16,7 +16,7 @@ val scalafixSettings = Seq(
 
 val sharedSettings = Seq(
   organization := "com.github.eikek",
-  scalaVersion := "2.13.10",
+  scalaVersion := "2.13.12",
   organizationName := "Eike K. & Contributors",
   licenses += ("AGPL-3.0-or-later", url(
     "https://spdx.org/licenses/AGPL-3.0-or-later.html"
@@ -19,7 +19,7 @@ final case class AddonArchive(url: LenientUri, name: String, version: String) {
   def nameAndVersion: String =
     s"$name-$version"
 
-  def extractTo[F[_]: Async](
+  def extractTo[F[_]: Async: Files](
       reader: UrlReader[F],
       directory: Path,
       withSubdir: Boolean = true,
@@ -48,7 +48,7 @@ final case class AddonArchive(url: LenientUri, name: String, version: String) {
   /** Read meta either from the given directory or extract the url to find the metadata
     * file to read
     */
-  def readMeta[F[_]: Async](
+  def readMeta[F[_]: Async: Files](
       urlReader: UrlReader[F],
       directory: Option[Path] = None
   ): F[AddonMeta] =
@@ -58,7 +58,7 @@ final case class AddonArchive(url: LenientUri, name: String, version: String) {
 }
 
 object AddonArchive {
-  def read[F[_]: Async](
+  def read[F[_]: Async: Files](
       url: LenientUri,
       urlReader: UrlReader[F],
       extractDir: Option[Path] = None
@@ -69,7 +69,7 @@ object AddonArchive {
       .map(m => addon.copy(name = m.meta.name, version = m.meta.version))
   }
 
-  def dockerAndFlakeExists[F[_]: Async](
+  def dockerAndFlakeExists[F[_]: Async: Files](
       archive: Either[Path, Stream[F, Byte]]
   ): F[(Boolean, Boolean)] = {
     val files = Files[F]
@@ -29,7 +29,7 @@ trait AddonExecutor[F[_]] {
 
 object AddonExecutor {
 
-  def apply[F[_]: Async](
+  def apply[F[_]: Async: Files](
       cfg: AddonExecutorConfig,
       urlReader: UrlReader[F]
   ): AddonExecutor[F] =
@@ -104,7 +104,7 @@ object AddonExecutor {
     } yield result
   }
 
-  def selectRunner[F[_]: Async](
+  def selectRunner[F[_]: Async: Files](
       cfg: AddonExecutorConfig,
       meta: AddonMeta,
       addonDir: Path
@@ -50,7 +50,7 @@ case class AddonMeta(
    * inspecting the archive to return defaults when the addon isn't declaring it in the
    * descriptor.
    */
-  def enabledTypes[F[_]: Async](
+  def enabledTypes[F[_]: Async: Files](
       archive: Either[Path, Stream[F, Byte]]
   ): F[List[RunnerType]] =
     for {
@@ -207,7 +207,7 @@ object AddonMeta {
     )
   }
 
-  def findInZip[F[_]: Async](zipFile: Stream[F, Byte]): F[AddonMeta] = {
+  def findInZip[F[_]: Async: Files](zipFile: Stream[F, Byte]): F[AddonMeta] = {
     val logger = docspell.logging.getLogger[F]
     val fail: F[AddonMeta] = Async[F].raiseError(
       new FileNotFoundException(
@@ -10,6 +10,7 @@ import cats.Applicative
 import cats.effect._
 import cats.syntax.all._
 import fs2.Stream
+import fs2.io.file.Files
 
 import docspell.addons.runner._
 import docspell.common.exec.Env
@@ -26,7 +27,9 @@ trait AddonRunner[F[_]] {
 }
 
 object AddonRunner {
-  def forType[F[_]: Async](cfg: AddonExecutorConfig)(rt: RunnerType) =
+  def forType[F[_]: Async: Files](
+      cfg: AddonExecutorConfig
+  )(rt: RunnerType): AddonRunner[F] =
     rt match {
       case RunnerType.NixFlake => NixFlakeRunner[F](cfg)
       case RunnerType.Docker => DockerRunner[F](cfg)
@@ -38,9 +41,9 @@ object AddonRunner {
 
   def pure[F[_]: Applicative](result: AddonResult): AddonRunner[F] =
     new AddonRunner[F] {
-      val runnerType = Nil
+      val runnerType: List[RunnerType] = Nil
 
-      def run(logger: Logger[F], env: Env, ctx: Context) =
+      def run(logger: Logger[F], env: Env, ctx: Context): F[AddonResult] =
         Applicative[F].pure(result)
     }
 
@@ -50,9 +53,9 @@ object AddonRunner {
       case a :: Nil => a
       case _ =>
        new AddonRunner[F] {
-          val runnerType = runners.flatMap(_.runnerType).distinct
+          val runnerType: List[RunnerType] = runners.flatMap(_.runnerType).distinct
 
-          def run(logger: Logger[F], env: Env, ctx: Context) =
+          def run(logger: Logger[F], env: Env, ctx: Context): F[AddonResult] =
            Stream
              .emits(runners)
              .evalTap(r =>
@@ -18,7 +18,7 @@ import docspell.common.Duration
 import docspell.common.exec._
 import docspell.logging.Logger
 
-final class NixFlakeRunner[F[_]: Async](cfg: NixFlakeRunner.Config)
+final class NixFlakeRunner[F[_]: Async: Files](cfg: NixFlakeRunner.Config)
     extends AddonRunner[F] {
 
   val runnerType = List(RunnerType.NixFlake)
@@ -104,7 +104,7 @@ final class NixFlakeRunner[F[_]: Async](cfg: NixFlakeRunner.Config)
 }
 
 object NixFlakeRunner {
-  def apply[F[_]: Async](cfg: AddonExecutorConfig): NixFlakeRunner[F] =
+  def apply[F[_]: Async: Files](cfg: AddonExecutorConfig): NixFlakeRunner[F] =
     new NixFlakeRunner[F](Config(cfg.nixRunner, cfg.nspawn, cfg.runTimeout))
 
   case class Config(
@@ -45,7 +45,7 @@ private[addons] object RunnerUtil {
    * expected to be relative to the `ctx.baseDir`. Additional arguments and environment
    * variables are added as configured in the addon.
    */
-  def runInContainer[F[_]: Async](
+  def runInContainer[F[_]: Async: Files](
      logger: Logger[F],
      cfg: AddonExecutorConfig.NSpawn,
      ctx: Context
@@ -19,10 +19,12 @@ import docspell.common.Duration
 import docspell.common.exec.{Args, Env, SysCmd}
 import docspell.logging.Logger
 
-final class TrivialRunner[F[_]: Async](cfg: TrivialRunner.Config) extends AddonRunner[F] {
+final class TrivialRunner[F[_]: Async: Files](cfg: TrivialRunner.Config)
+    extends AddonRunner[F] {
   private val sync = Async[F]
   private val files = Files[F]
-  implicit val andMonoid: Monoid[Boolean] = Monoid.instance[Boolean](true, _ && _)
+  implicit val andMonoid: Monoid[Boolean] =
+    Monoid.instance[Boolean](emptyValue = true, _ && _)
 
   private val executeBits = PosixPermissions(
     OwnerExecute,
@@ -34,13 +36,13 @@ final class TrivialRunner[F[_]: Async](cfg: TrivialRunner.Config) extends AddonR
     OthersRead
   )
 
-  val runnerType = List(RunnerType.Trivial)
+  val runnerType: List[RunnerType] = List(RunnerType.Trivial)
 
   def run(
       logger: Logger[F],
       env: Env,
       ctx: Context
-  ) = {
+  ): F[AddonResult] = {
     val binaryPath = ctx.meta.runner
       .flatMap(_.trivial)
       .map(_.exec)
@@ -71,7 +73,7 @@ final class TrivialRunner[F[_]: Async](cfg: TrivialRunner.Config) extends AddonR
 }
 
 object TrivialRunner {
-  def apply[F[_]: Async](cfg: AddonExecutorConfig): TrivialRunner[F] =
+  def apply[F[_]: Async: Files](cfg: AddonExecutorConfig): TrivialRunner[F] =
     new TrivialRunner[F](Config(cfg.nspawn, cfg.runTimeout))
 
   case class Config(nspawn: NSpawn, timeout: Duration)
@@ -9,6 +9,7 @@ package docspell.analysis
 import cats.Applicative
 import cats.effect._
 import cats.implicits._
+import fs2.io.file.Files
 
 import docspell.analysis.classifier.{StanfordTextClassifier, TextClassifier}
 import docspell.analysis.contact.Contact
@@ -36,7 +37,7 @@ object TextAnalyser {
       labels ++ dates.map(dl => dl.label.copy(label = dl.date.toString))
     }
 
-  def create[F[_]: Async](cfg: TextAnalysisConfig): Resource[F, TextAnalyser[F]] =
+  def create[F[_]: Async: Files](cfg: TextAnalysisConfig): Resource[F, TextAnalyser[F]] =
     Resource
       .eval(Nlp(cfg.nlpConfig))
       .map(stanfordNer =>
@@ -83,7 +84,7 @@ object TextAnalyser {
 
   /** Provides the nlp pipeline based on the configuration. */
   private object Nlp {
-    def apply[F[_]: Async](
+    def apply[F[_]: Async: Files](
        cfg: TextAnalysisConfig.NlpConfig
    ): F[Input[F] => F[Vector[NerLabel]]] = {
      val log = docspell.logging.getLogger[F]
@@ -21,7 +21,7 @@ import docspell.logging.Logger
 
 import edu.stanford.nlp.classify.ColumnDataClassifier
 
-final class StanfordTextClassifier[F[_]: Async](cfg: TextClassifierConfig)
+final class StanfordTextClassifier[F[_]: Async: Files](cfg: TextClassifierConfig)
     extends TextClassifier[F] {
 
   def trainClassifier[A](
@@ -11,6 +11,7 @@ import scala.concurrent.duration.{Duration => _, _}
 import cats.effect.Ref
 import cats.effect._
 import cats.implicits._
+import fs2.io.file.Files
 
 import docspell.analysis.NlpSettings
 import docspell.common._
@@ -32,7 +33,7 @@ trait PipelineCache[F[_]] {
 object PipelineCache {
   private[this] val logger = docspell.logging.unsafeLogger
 
-  def apply[F[_]: Async](clearInterval: Duration)(
+  def apply[F[_]: Async: Files](clearInterval: Duration)(
      creator: NlpSettings => Annotator[F],
      release: F[Unit]
  ): F[PipelineCache[F]] = {
@@ -44,7 +45,7 @@ object PipelineCache {
    } yield new Impl[F](data, creator, cacheClear)
  }
 
-  final private class Impl[F[_]: Async](
+  final private class Impl[F[_]: Async: Files](
      data: Ref[F, Map[String, Entry[Annotator[F]]]],
      creator: NlpSettings => Annotator[F],
      cacheClear: CacheClearing[F]
@@ -7,6 +7,7 @@
 package docspell.backend
 
 import cats.effect._
+import fs2.io.file.Files
 
 import docspell.backend.BackendCommands.EventContext
 import docspell.backend.auth.Login
@@ -65,7 +66,7 @@ trait BackendApp[F[_]] {
 
 object BackendApp {
 
-  def create[F[_]: Async](
+  def create[F[_]: Async: Files](
      cfg: Config,
      store: Store[F],
      javaEmil: Emil[F],
@@ -9,6 +9,7 @@ package docspell.backend.joex
 import cats.data.OptionT
 import cats.effect._
 import cats.syntax.all._
+import fs2.io.file.Files
 
 import docspell.addons._
 import docspell.backend.joex.AddonOps.{AddonRunConfigRef, ExecResult}
@@ -98,7 +99,7 @@ object AddonOps {
     )
   }
 
-  def apply[F[_]: Async](
+  def apply[F[_]: Async: Files](
      cfg: AddonEnvConfig,
      store: Store[F],
      cmdRunner: BackendCommandRunner[F, Unit],
@@ -160,7 +161,10 @@ object AddonOps {
        execRes = ExecResult(List(result), List(runCfg))
      } yield execRes
 
-      def createMiddleware(custom: Middleware[F], runCfg: AddonRunConfigRef) = for {
+      def createMiddleware(
+          custom: Middleware[F],
+          runCfg: AddonRunConfigRef
+      ): F[Middleware[F]] = for {
        dscMW <- prepare.createDscEnv(runCfg, cfg.executorConfig.runTimeout)
        mm = dscMW >> custom >> prepare.logResult(logger, runCfg) >> Middleware
          .ephemeralRun[F]
@@ -10,7 +10,7 @@ import cats.data.EitherT
 import cats.effect._
 import cats.syntax.all._
 import fs2.Stream
-import fs2.io.file.Path
+import fs2.io.file.{Files, Path}
 
 import docspell.addons.{AddonMeta, RunnerType}
 import docspell.backend.Config
@@ -21,7 +21,7 @@ import docspell.joexapi.model.AddonSupport
 import docspell.store.Store
 import docspell.store.records.RAddonArchive
 
-final class AddonValidate[F[_]: Async](
+final class AddonValidate[F[_]: Async: Files](
    cfg: Config.Addons,
    store: Store[F],
    joexOps: OJoex[F]
@@ -9,6 +9,7 @@ package docspell.backend.ops
 import cats.data.{EitherT, NonEmptyList, OptionT}
 import cats.effect._
 import cats.syntax.all._
+import fs2.io.file.Files
 
 import docspell.addons.{AddonMeta, AddonTriggerType}
 import docspell.backend.ops.AddonValidationError._
@@ -129,7 +130,7 @@ object OAddons {
     def failure[A](error: AddonValidationError): AddonValidationResult[A] = Left(error)
   }
 
-  def apply[F[_]: Async](
+  def apply[F[_]: Async: Files](
      cfg: Config.Addons,
      store: Store[F],
      userTasks: UserTaskStore[F],
@@ -39,7 +39,7 @@ final case class Binary[F[_]](name: String, mime: MimeType, data: Stream[F, Byte
 
 object Binary {
 
-  def apply[F[_]: Async](file: Path): Binary[F] =
+  def apply[F[_]: Files](file: Path): Binary[F] =
     Binary(file.fileName.toString, Files[F].readAll(file))
 
   def apply[F[_]](name: String, data: Stream[F, Byte]): Binary[F] =
@@ -74,11 +74,11 @@ object Binary {
     data.chunks.map(_.toByteVector).compile.fold(ByteVector.empty)((r, e) => r ++ e)
 
   /** Convert paths into `Binary`s */
-  def toBinary[F[_]: Async]: Pipe[F, Path, Binary[F]] =
+  def toBinary[F[_]: Files]: Pipe[F, Path, Binary[F]] =
     _.map(Binary[F](_))
 
   /** Save one or more binaries to a target directory. */
-  def saveTo[F[_]: Async](
+  def saveTo[F[_]: Async: Files](
      logger: Logger[F],
      targetDir: Path
  ): Pipe[F, Binary[F], Path] =
@@ -72,6 +72,6 @@ object File {
       .drain
       .map(_ => file)
 
-  def readJson[F[_]: Async, A](file: Path)(implicit d: Decoder[A]): F[A] =
+  def readJson[F[_]: Async: Files, A](file: Path)(implicit d: Decoder[A]): F[A] =
     readText[F](file).map(parser.decode[A]).rethrow
 }
@@ -7,7 +7,7 @@
 package docspell.common.util
 
 import cats.effect._
-import fs2.io.file.Path
+import fs2.io.file.{Files, Path}
 import fs2.{Pipe, Stream}
 
 import docspell.common.Glob
@@ -33,9 +33,9 @@ trait Zip[F[_]] {
 }
 
 object Zip {
-  val defaultChunkSize = 64 * 1024
+  private val defaultChunkSize = 64 * 1024
 
-  def apply[F[_]: Async](
+  def apply[F[_]: Async: Files](
      logger: Option[Logger[F]] = None,
      tempDir: Option[Path] = None
  ): Zip[F] =
@@ -22,7 +22,7 @@ import fs2.{Chunk, Pipe, Stream}
 import docspell.common.Glob
 import docspell.logging.Logger
 
-final private class ZipImpl[F[_]: Async](
+final private class ZipImpl[F[_]: Async: Files](
    log: Option[Logger[F]],
    tempDir: Option[Path]
 ) extends Zip[F] {
@@ -27,7 +27,10 @@ object ConfigFactory {
    * 1. if no file is found, read the config from environment variables falling back to
    *    the default config
    */
-  def default[F[_]: Async, C: ClassTag: ConfigReader](logger: Logger[F], atPath: String)(
+  def default[F[_]: Async: Files, C: ClassTag: ConfigReader](
+      logger: Logger[F],
+      atPath: String
+  )(
      args: List[String],
      validation: Validation[C]
  ): F[C] =
@@ -74,7 +77,7 @@ object ConfigFactory {
  /** Uses the first argument as a path to the config file. If it is specified but the
    * file doesn't exist, an exception is thrown.
    */
-  private def findFileFromArgs[F[_]: Async](args: List[String]): F[Option[Path]] =
+  private def findFileFromArgs[F[_]: Async: Files](args: List[String]): F[Option[Path]] =
    args.headOption
      .map(Path.apply)
      .traverse(p =>
@@ -89,7 +92,7 @@ object ConfigFactory {
    * to giving the file as argument, it is not an error to specify a non-existing file
    * via a system property.
    */
-  private def checkSystemProperty[F[_]: Async]: OptionT[F, Path] =
+  private def checkSystemProperty[F[_]: Async: Files]: OptionT[F, Path] =
    for {
      cf <- OptionT(
        Sync[F].delay(
@@ -11,6 +11,7 @@ import java.nio.charset.StandardCharsets
 import cats.effect._
 import cats.implicits._
 import fs2._
+import fs2.io.file.Files
 
 import docspell.common._
 import docspell.convert.ConversionResult.Handler
@@ -32,7 +33,7 @@ trait Conversion[F[_]] {
 
 object Conversion {
 
-  def create[F[_]: Async](
+  def create[F[_]: Async: Files](
      cfg: ConvertConfig,
      sanitizeHtml: SanitizeHtml,
      additionalPasswords: List[Password],
@@ -19,7 +19,7 @@ import docspell.logging.Logger
 
 private[extern] object ExternConv {
 
-  def toPDF[F[_]: Async, A](
+  def toPDF[F[_]: Async: Files, A](
      name: String,
      cmdCfg: SystemCommand.Config,
      wd: Path,
@@ -71,7 +71,7 @@ private[extern] object ExternConv {
        handler.run(ConversionResult.failure(ex))
    }
 
-  def readResult[F[_]: Async](
+  def readResult[F[_]: Async: Files](
      chunkSize: Int,
      logger: Logger[F]
  )(out: Path, result: SystemCommand.Result): F[ConversionResult[F]] =
@@ -99,7 +99,7 @@ private[extern] object ExternConv {
        .pure[F]
    }
 
-  def readResultTesseract[F[_]: Async](
+  def readResultTesseract[F[_]: Async: Files](
      outPrefix: String,
      chunkSize: Int,
      logger: Logger[F]
@@ -127,7 +127,7 @@ private[extern] object ExternConv {
      }
  }
 
-  private def storeDataToFile[F[_]: Async](
+  private def storeDataToFile[F[_]: Async: Files](
      name: String,
      logger: Logger[F],
      inFile: Path
@@ -146,7 +146,7 @@ private[extern] object ExternConv {
      logger.debug(s"$name stdout: ${result.stdout}") *>
        logger.debug(s"$name stderr: ${result.stderr}")
 
-  private def storeFile[F[_]: Async](
+  private def storeFile[F[_]: Async: Files](
      in: Stream[F, Byte],
      target: Path
  ): F[Unit] =
@@ -8,7 +8,7 @@ package docspell.convert.extern
 
 import cats.effect._
 import fs2.Stream
-import fs2.io.file.Path
+import fs2.io.file.{Files, Path}
 
 import docspell.common._
 import docspell.convert.ConversionResult
@@ -17,7 +17,7 @@ import docspell.logging.Logger
 
 object OcrMyPdf {
 
-  def toPDF[F[_]: Async, A](
+  def toPDF[F[_]: Async: Files, A](
      cfg: OcrMyPdfConfig,
      lang: Language,
      chunkSize: Int,
@@ -8,7 +8,7 @@ package docspell.convert.extern
 
 import cats.effect._
 import fs2.Stream
-import fs2.io.file.Path
+import fs2.io.file.{Files, Path}
 
 import docspell.common._
 import docspell.convert.ConversionResult
@@ -17,7 +17,7 @@ import docspell.logging.Logger
 
 object Tesseract {
 
-  def toPDF[F[_]: Async, A](
+  def toPDF[F[_]: Async: Files, A](
      cfg: TesseractConfig,
      lang: Language,
      chunkSize: Int,
@@ -8,7 +8,7 @@ package docspell.convert.extern
 
 import cats.effect._
 import fs2.Stream
-import fs2.io.file.Path
+import fs2.io.file.{Files, Path}
 
 import docspell.common._
 import docspell.convert.ConversionResult
@@ -17,7 +17,7 @@ import docspell.logging.Logger
 
 object Unoconv {
 
-  def toPDF[F[_]: Async, A](
+  def toPDF[F[_]: Async: Files, A](
      cfg: UnoconvConfig,
      chunkSize: Int,
      logger: Logger[F]
@@ -10,7 +10,7 @@ import java.nio.charset.Charset
 
 import cats.effect._
 import cats.implicits._
-import fs2.io.file.Path
+import fs2.io.file.{Files, Path}
 import fs2.{Chunk, Stream}
 
 import docspell.common._
@@ -20,7 +20,7 @@ import docspell.logging.Logger
 
 object Weasyprint {
 
-  def toPDF[F[_]: Async, A](
+  def toPDF[F[_]: Async: Files, A](
      cfg: WeasyprintConfig,
      chunkSize: Int,
      charset: Charset,
@@ -46,7 +46,7 @@ object Weasyprint {
    )
 
    ExternConv
-      .toPDF[F, A]("weasyprint", cmdCfg, cfg.workingDir, true, logger, reader)(
+      .toPDF[F, A]("weasyprint", cmdCfg, cfg.workingDir, useStdin = true, logger, reader)(
        inSane,
        handler
      )
@@ -10,7 +10,7 @@ import java.nio.charset.Charset
 
 import cats.effect._
 import cats.implicits._
-import fs2.io.file.Path
+import fs2.io.file.{Files, Path}
 import fs2.{Chunk, Stream}
 
 import docspell.common._
@@ -20,7 +20,7 @@ import docspell.logging.Logger
 
 object WkHtmlPdf {
 
-  def toPDF[F[_]: Async, A](
+  def toPDF[F[_]: Async: Files, A](
      cfg: WkHtmlPdfConfig,
      chunkSize: Int,
      charset: Charset,
@@ -46,7 +46,14 @@ object WkHtmlPdf {
    )
 
    ExternConv
-      .toPDF[F, A]("wkhtmltopdf", cmdCfg, cfg.workingDir, true, logger, reader)(
+      .toPDF[F, A](
+        "wkhtmltopdf",
+        cmdCfg,
+        cfg.workingDir,
+        useStdin = true,
+        logger,
+        reader
+      )(
        inSane,
        handler
      )
@@ -9,6 +9,7 @@ package docspell.extract
 import cats.effect._
 import cats.implicits._
 import fs2.Stream
+import fs2.io.file.Files
 
 import docspell.common._
 import docspell.extract.internal.Text
@@ -32,7 +33,7 @@ trait Extraction[F[_]] {
 
 object Extraction {
 
-  def create[F[_]: Async](
+  def create[F[_]: Async: Files](
      logger: Logger[F],
      cfg: ExtractConfig
  ): Extraction[F] =
@@ -9,6 +9,7 @@ package docspell.extract
 import cats.effect._
 import cats.implicits._
 import fs2.Stream
+import fs2.io.file.Files
 
 import docspell.common.Language
 import docspell.extract.internal.Text
@@ -24,7 +25,7 @@ object PdfExtract {
     Result(t._1, t._2)
   }
 
-  def get[F[_]: Async](
+  def get[F[_]: Async: Files](
      in: Stream[F, Byte],
      lang: Language,
      stripMinLen: Int,
@@ -8,7 +8,7 @@ package docspell.extract.ocr
 
 import cats.effect._
 import fs2.Stream
-import fs2.io.file.Path
+import fs2.io.file.{Files, Path}
 
 import docspell.common._
 import docspell.common.util.File
@@ -17,7 +17,7 @@ import docspell.logging.Logger
 object Ocr {
 
   /** Extract the text of all pages in the given pdf file. */
-  def extractPdf[F[_]: Async](
+  def extractPdf[F[_]: Async: Files](
      pdf: Stream[F, Byte],
      logger: Logger[F],
      lang: String,
@@ -40,7 +40,7 @@ object Ocr {
  ): Stream[F, String] =
    runTesseractStdin(img, logger, lang, config)
 
-  def extractPdFFile[F[_]: Async](
+  def extractPdFFile[F[_]: Async: Files](
      pdf: Path,
      logger: Logger[F],
      lang: String,
@@ -65,7 +65,7 @@ object Ocr {
  /** Run ghostscript to extract all pdf pages into tiff files. The files are stored to a
    * temporary location on disk and returned.
    */
-  private[extract] def runGhostscript[F[_]: Async](
+  private[extract] def runGhostscript[F[_]: Async: Files](
      pdf: Stream[F, Byte],
      cfg: OcrConfig,
      wd: Path,
@@ -91,7 +91,7 @@ object Ocr {
  /** Run ghostscript to extract all pdf pages into tiff files. The files are stored to a
    * temporary location on disk and returned.
    */
-  private[extract] def runGhostscriptFile[F[_]: Async](
+  private[extract] def runGhostscriptFile[F[_]: Async: Files](
      pdf: Path,
      ghostscript: SystemCommand.Config,
      wd: Path,
@@ -8,6 +8,7 @@ package docspell.extract.ocr
 
 import cats.effect._
 import fs2.Stream
+import fs2.io.file.Files
 
 import docspell.common._
 import docspell.extract.internal.Text
@@ -16,7 +17,7 @@ import docspell.logging.Logger
 
 object TextExtract {
 
-  def extract[F[_]: Async](
+  def extract[F[_]: Async: Files](
      in: Stream[F, Byte],
      logger: Logger[F],
      lang: String,
@@ -24,7 +25,7 @@ object TextExtract {
  ): Stream[F, Text] =
    extractOCR(in, logger, lang, config)
 
-  def extractOCR[F[_]: Async](
+  def extractOCR[F[_]: Async: Files](
      in: Stream[F, Byte],
      logger: Logger[F],
      lang: String,
@@ -7,7 +7,7 @@
 package docspell.files
 
 import cats.data.OptionT
-import cats.effect.{Async, Sync}
+import cats.effect.Sync
 import cats.syntax.all._
 import fs2.Pipe
 import fs2.io.file.{Files, Path}
@@ -39,7 +39,7 @@ trait FileSupport {
       TikaMimetype.detect[F](bin.data, hint).map(mt => bin.copy(mime = mt))
     }
 
-  def toBinaryWithMime[F[_]: Async]: Pipe[F, Path, Binary[F]] =
+  def toBinaryWithMime[F[_]: Sync: Files]: Pipe[F, Path, Binary[F]] =
     _.evalMap(file => file.mimeType.map(mt => Binary(file).copy(mime = mt)))
 }
 
@@ -7,6 +7,7 @@
 package docspell.joex
 
 import cats.effect.Async
+import fs2.io.file.Files
 
 import docspell.config.Implicits._
 import docspell.config.{ConfigFactory, FtsType, Validation}
@@ -25,7 +26,7 @@ object ConfigFile {
   // IntelliJ is wrong, this is required
   import Implicits._
 
-  def loadConfig[F[_]: Async](args: List[String]): F[Config] = {
+  def loadConfig[F[_]: Async: Files](args: List[String]): F[Config] = {
     val logger = docspell.logging.getLogger[F]
     ConfigFactory
       .default[F, Config](logger, "docspell.joex")(args, validate)
@@ -9,6 +9,8 @@ package docspell.joex
 import cats.effect._
 import cats.implicits._
 import fs2.concurrent.SignallingRef
+import fs2.io.file.Files
+import fs2.io.net.Network
 
 import docspell.backend.MailAddressCodec
 import docspell.backend.joex.FindJobOwnerAccount
@@ -45,7 +47,7 @@ final class JoexAppImpl[F[_]: Async](
   def init: F[Unit] = {
     val run = scheduler.start.compile.drain
     val prun = periodicScheduler.start.compile.drain
-    val eventConsume = notificationMod.consumeAllEvents(2).compile.drain
+    val eventConsume = notificationMod.consumeAllEvents(maxConcurrent = 2).compile.drain
     for {
       _ <- scheduleBackgroundTasks
       _ <- Async[F].start(run)
@@ -62,7 +64,9 @@ final class JoexAppImpl[F[_]: Async](
     store.transact(RJobLog.findLogs(jobId))
 
   def initShutdown: F[Unit] =
-    periodicScheduler.shutdown *> scheduler.shutdown(false) *> termSignal.set(true)
+    periodicScheduler.shutdown *> scheduler.shutdown(cancelAll = false) *> termSignal.set(
+      true
+    )
 
   private def scheduleBackgroundTasks: F[Unit] =
     HouseKeepingTask
@@ -81,7 +85,8 @@ final class JoexAppImpl[F[_]: Async](
   private def scheduleEmptyTrashTasks: F[Unit] =
     store
       .transact(
-        REmptyTrashSetting.findForAllCollectives(OCollective.EmptyTrash.default, 50)
+        REmptyTrashSetting
+          .findForAllCollectives(OCollective.EmptyTrash.default, chunkSize = 50)
       )
       .evalMap { es =>
        val args = EmptyTrashArgs(es.cid, es.minAge)
@@ -98,7 +103,7 @@ final class JoexAppImpl[F[_]: Async](
 
 object JoexAppImpl extends MailAddressCodec {
 
-  def create[F[_]: Async](
+  def create[F[_]: Async: Files: Network](
      cfg: Config,
      termSignal: SignallingRef[F, Boolean],
      store: Store[F],
@@ -107,12 +112,14 @@ object JoexAppImpl extends MailAddressCodec {
      pools: Pools
  ): Resource[F, JoexApp[F]] =
    for {
-      joexLogger <- Resource.pure(docspell.logging.getLogger[F](s"joex-${cfg.appId.id}"))
+      joexLogger <- Resource.pure(
+        docspell.logging.getLogger[F](name = s"joex-${cfg.appId.id}")
+      )
      pubSubT = PubSubT(pubSub, joexLogger)
      javaEmil =
        JavaMailEmil(Settings.defaultSettings.copy(debug = cfg.mailDebug))
      notificationMod <- Resource.eval(
-        NotificationModuleImpl[F](store, javaEmil, httpClient, 200)
+        NotificationModuleImpl[F](store, javaEmil, httpClient, queueSize = 200)
      )
 
      jobStoreModule = JobStoreModuleBuilder(store)
@@ -9,6 +9,8 @@ package docspell.joex
 import cats.effect._
 import fs2.Stream
 import fs2.concurrent.SignallingRef
+import fs2.io.file.Files
+import fs2.io.net.Network
 
 import docspell.backend.msg.Topics
 import docspell.common.Pools
@@ -32,7 +34,10 @@ object JoexServer {
      exitRef: Ref[F, ExitCode]
  )
 
-  def stream[F[_]: Async](cfg: Config, pools: Pools): Stream[F, Nothing] = {
+  def stream[F[_]: Async: Files: Network](
+      cfg: Config,
+      pools: Pools
+  ): Stream[F, Nothing] = {
 
    val app = for {
      signal <- Resource.eval(SignallingRef[F, Boolean](false))
@@ -7,6 +7,8 @@
 package docspell.joex
 
 import cats.effect.{Async, Resource}
+import fs2.io.file.Files
+import fs2.io.net.Network
 
 import docspell.analysis.TextAnalyser
 import docspell.backend.BackendCommands
@@ -46,7 +48,7 @@ import docspell.store.Store
 import emil.Emil
 import org.http4s.client.Client
 
-final class JoexTasks[F[_]: Async](
+final class JoexTasks[F[_]: Async: Files: Network](
    cfg: Config,
    store: Store[F],
    itemOps: OItem[F],
@@ -257,7 +259,7 @@ final class JoexTasks[F[_]: Async](
 
 object JoexTasks {
 
-  def resource[F[_]: Async](
+  def resource[F[_]: Async: Files: Network](
      cfg: Config,
      pools: Pools,
      jobStoreModule: JobStoreModuleBuilder.Module[F],
@@ -43,7 +43,7 @@ object GenericItemAddonTask extends LoggerExtension {
       "ITEM_PDF_JSON" -> pdfMetaJson
     )
 
-  def apply[F[_]: Async](
+  def apply[F[_]: Async: Files](
      ops: AddonOps[F],
      store: Store[F],
      trigger: AddonTriggerType,
@@ -57,7 +57,7 @@ object GenericItemAddonTask extends LoggerExtension {
        data
      )
 
-  def addonResult[F[_]: Async](
+  def addonResult[F[_]: Async: Files](
      ops: AddonOps[F],
      store: Store[F],
      trigger: AddonTriggerType,
@@ -73,7 +73,7 @@ object GenericItemAddonTask extends LoggerExtension {
      )
    }
 
-  def prepareItemData[F[_]: Async](
+  def prepareItemData[F[_]: Async: Files](
      logger: Logger[F],
      store: Store[F],
      data: ItemData,
@@ -9,6 +9,7 @@ package docspell.joex.addon
 import cats.data.OptionT
 import cats.effect._
 import cats.syntax.all._
+import fs2.io.file.Files
 
 import docspell.addons.AddonTriggerType
 import docspell.backend.joex.AddonOps
@@ -26,7 +27,10 @@ object ItemAddonTask extends AddonTaskExtension {
   def onCancel[F[_]]: Task[F, Args, Unit] =
     Task.log(_.warn(s"Cancelling ${name.id} task"))
 
-  def apply[F[_]: Async](ops: AddonOps[F], store: Store[F]): Task[F, Args, Result] =
+  def apply[F[_]: Async: Files](
+      ops: AddonOps[F],
+      store: Store[F]
+  ): Task[F, Args, Result] =
     Task { ctx =>
       (for {
         item <- OptionT(
@@ -8,7 +8,7 @@ package docspell.joex.analysis
 
 import cats.effect._
 import cats.implicits._
-import fs2.io.file.Path
+import fs2.io.file.{Files, Path}
 
 import docspell.analysis.split.TextSplitter
 import docspell.common._
@@ -39,7 +39,7 @@ object NerFile {
   private def jsonFilePath(directory: Path, collective: CollectiveId): Path =
     directory.resolve(s"${collective.value}.json")
 
-  def find[F[_]: Async](
+  def find[F[_]: Async: Files](
      collective: CollectiveId,
      directory: Path
  ): F[Option[NerFile]] = {
@@ -9,7 +9,7 @@ package docspell.joex.analysis
 import cats.effect._
 import cats.effect.std.Semaphore
 import cats.implicits._
-import fs2.io.file.Path
+import fs2.io.file.{Files, Path}
 
 import docspell.common._
 import docspell.common.util.File
@@ -32,7 +32,7 @@ object RegexNerFile {
 
   case class Config(maxEntries: Int, directory: Path, minTime: Duration)
 
-  def apply[F[_]: Async](
+  def apply[F[_]: Async: Files](
      cfg: Config,
      store: Store[F]
  ): Resource[F, RegexNerFile[F]] =
@@ -41,7 +41,7 @@ object RegexNerFile {
      writer <- Resource.eval(Semaphore(1))
    } yield new Impl[F](cfg.copy(directory = dir), store, writer)
 
-  final private class Impl[F[_]: Async](
+  final private class Impl[F[_]: Async: Files](
      cfg: Config,
      store: Store[F],
      writer: Semaphore[F] // TODO allow parallelism per collective
@@ -10,6 +10,7 @@ import java.time.format.DateTimeFormatter
 
 import cats.effect._
 import cats.syntax.all._
+import fs2.io.file.Files
 import fs2.{Pipe, Stream}
 
 import docspell.backend.ops.ODownloadAll
@@ -28,7 +29,7 @@ object DownloadZipTask {
   def onCancel[F[_]]: Task[F, Args, Unit] =
     Task.log(_.warn(s"Cancelling ${DownloadZipArgs.taskName.id} task"))
 
-  def apply[F[_]: Async](
+  def apply[F[_]: Async: Files](
      chunkSize: Int,
      store: Store[F],
      downloadOps: ODownloadAll[F]
@@ -8,6 +8,7 @@ package docspell.joex.hk
 
 import cats.effect._
 import cats.implicits._
+import fs2.io.net.Network
 
 import docspell.common._
 import docspell.logging.Logger
@@ -19,7 +20,7 @@ import org.http4s.client.Client
 import org.http4s.ember.client.EmberClientBuilder
 
 object CheckNodesTask {
-  def apply[F[_]: Async](
+  def apply[F[_]: Async: Network](
      cfg: HouseKeepingConfig.CheckNodes,
      store: Store[F]
  ): Task[F, Unit, CleanupResult] =
@@ -8,6 +8,7 @@ package docspell.joex.hk
 
 import cats.effect._
 import cats.implicits._
+import fs2.io.net.Network
 
 import docspell.backend.ops.{ODownloadAll, OFileRepository}
 import docspell.common._
@@ -26,7 +27,7 @@ object HouseKeepingTask {
 
   val taskName: Ident = Ident.unsafe("housekeeping")
 
-  def apply[F[_]: Async](
+  def apply[F[_]: Async: Network](
      cfg: Config,
      store: Store[F],
      fileRepo: OFileRepository[F],
@@ -21,7 +21,7 @@ import docspell.store.records.RClassifierModel
 
 object Classify {
 
-  def apply[F[_]: Async](
+  def apply[F[_]: Async: Files](
      logger: Logger[F],
      workingDir: Path,
      store: Store[F],
@@ -9,6 +9,7 @@ package docspell.joex.learn
 import cats.data.OptionT
 import cats.effect._
 import cats.implicits._
+import fs2.io.file.Files
 
 import docspell.analysis.TextAnalyser
 import docspell.backend.ops.OCollective
@@ -28,7 +29,7 @@ object LearnClassifierTask {
   def onCancel[F[_]]: Task[F, Args, Unit] =
     Task.log(_.warn("Cancelling learn-classifier task"))
 
-  def apply[F[_]: Async](
+  def apply[F[_]: Async: Files](
      cfg: Config.TextAnalysis,
      store: Store[F],
      analyser: TextAnalyser[F]
@@ -37,7 +38,7 @@ object LearnClassifierTask {
      .flatMap(_ => learnItemEntities(cfg, store, analyser))
      .flatMap(_ => Task(_ => Sync[F].delay(System.gc())))
 
-  private def learnItemEntities[F[_]: Async](
+  private def learnItemEntities[F[_]: Async: Files](
      cfg: Config.TextAnalysis,
      store: Store[F],
      analyser: TextAnalyser[F]
@@ -56,7 +57,7 @@ object LearnClassifierTask {
      else ().pure[F]
    }
 
-  private def learnTags[F[_]: Async](
+  private def learnTags[F[_]: Async: Files](
      cfg: Config.TextAnalysis,
      store: Store[F],
      analyser: TextAnalyser[F]
@@ -10,6 +10,7 @@ import cats.data.Kleisli
 import cats.effect._
 import cats.implicits._
 import fs2.Stream
+import fs2.io.file.Files
 
 import docspell.analysis.TextAnalyser
 import docspell.analysis.classifier.TextClassifier.Data
@@ -18,7 +19,7 @@ import docspell.scheduler._
 import docspell.store.Store
 
 object LearnItemEntities {
-  def learnAll[F[_]: Async, A](
+  def learnAll[F[_]: Async: Files, A](
      analyser: TextAnalyser[F],
      store: Store[F],
      collective: CollectiveId,
@@ -32,7 +33,7 @@ object LearnItemEntities {
      .flatMap(_ => learnConcPerson(analyser, store, collective, maxItems, maxTextLen))
      .flatMap(_ => learnConcEquip(analyser, store, collective, maxItems, maxTextLen))
 
-  def learnCorrOrg[F[_]: Async, A](
+  def learnCorrOrg[F[_]: Async: Files, A](
      analyser: TextAnalyser[F],
      store: Store[F],
      collective: CollectiveId,
@@ -44,7 +45,7 @@ object LearnItemEntities {
      _ => SelectItems.forCorrOrg(store, collective, maxItems, maxTextLen)
    )
 
-  def learnCorrPerson[F[_]: Async, A](
+  def learnCorrPerson[F[_]: Async: Files, A](
      analyser: TextAnalyser[F],
      store: Store[F],
      collective: CollectiveId,
@@ -56,7 +57,7 @@ object LearnItemEntities {
      _ => SelectItems.forCorrPerson(store, collective, maxItems, maxTextLen)
    )
 
-  def learnConcPerson[F[_]: Async, A](
+  def learnConcPerson[F[_]: Async: Files, A](
      analyser: TextAnalyser[F],
      store: Store[F],
      collective: CollectiveId,
@@ -68,7 +69,7 @@ object LearnItemEntities {
      _ => SelectItems.forConcPerson(store, collective, maxItems, maxTextLen)
    )
 
-  def learnConcEquip[F[_]: Async, A](
+  def learnConcEquip[F[_]: Async: Files, A](
      analyser: TextAnalyser[F],
      store: Store[F],
      collective: CollectiveId,
@@ -80,7 +81,7 @@ object LearnItemEntities {
      _ => SelectItems.forConcEquip(store, collective, maxItems, maxTextLen)
    )
 
-  private def learn[F[_]: Async, A](
+  private def learn[F[_]: Async: Files, A](
      store: Store[F],
      analyser: TextAnalyser[F],
      collective: CollectiveId
@@ -9,6 +9,7 @@ package docspell.joex.learn
 import cats.data.Kleisli
 import cats.effect._
 import cats.implicits._
+import fs2.io.file.Files
 
 import docspell.analysis.TextAnalyser
 import docspell.common._
@@ -18,7 +19,7 @@ import docspell.store.records.RClassifierSetting
 
 object LearnTags {
 
-  def learnTagCategory[F[_]: Async, A](
+  def learnTagCategory[F[_]: Async: Files, A](
       analyser: TextAnalyser[F],
       store: Store[F],
       collective: CollectiveId,
@@ -43,7 +44,10 @@ object LearnTags {
       )
   }
 
-  def learnAllTagCategories[F[_]: Async, A](analyser: TextAnalyser[F], store: Store[F])(
+  def learnAllTagCategories[F[_]: Async: Files, A](
+      analyser: TextAnalyser[F],
+      store: Store[F]
+  )(
       collective: CollectiveId,
       maxItems: Int,
       maxTextLen: Int
@@ -18,7 +18,7 @@ import docspell.store.records.RClassifierModel
 
 object StoreClassifierModel {
 
-  def handleModel[F[_]: Async](
+  def handleModel[F[_]: Async: Files](
       store: Store[F],
       logger: Logger[F],
       collective: CollectiveId,
@@ -11,6 +11,7 @@ import cats.data.OptionT
 import cats.effect._
 import cats.implicits._
 import fs2.Stream
+import fs2.io.file.Files
 
 import docspell.backend.JobFactory
 import docspell.common._
@@ -35,7 +36,10 @@ import docspell.store.Store
 object MultiUploadArchiveTask {
   type Args = ProcessItemArgs
 
-  def apply[F[_]: Async](store: Store[F], jobStore: JobStore[F]): Task[F, Args, Result] =
+  def apply[F[_]: Async: Files](
+      store: Store[F],
+      jobStore: JobStore[F]
+  ): Task[F, Args, Result] =
     Task { ctx =>
       ctx.args.files
         .traverse { file =>
@@ -104,7 +108,7 @@ object MultiUploadArchiveTask {
       .map(_.mimetype.matches(MimeType.zip))
       .getOrElse(false)
 
-  private def extractZip[F[_]: Async](
+  private def extractZip[F[_]: Async: Files](
       store: Store[F],
       args: Args
   )(file: ProcessItemArgs.File): Stream[F, ProcessItemArgs] =
@@ -11,6 +11,7 @@ import cats.data.OptionT
 import cats.effect._
 import cats.implicits._
 import fs2.Stream
+import fs2.io.file.Files
 
 import docspell.common._
 import docspell.convert.ConversionResult
@@ -35,9 +36,9 @@ object PdfConvTask {
       deriveEncoder[Args]
   }
 
-  val taskName = Ident.unsafe("pdf-files-migration")
+  val taskName: Ident = Ident.unsafe("pdf-files-migration")
 
-  def apply[F[_]: Async](cfg: Config, store: Store[F]): Task[F, Args, Unit] =
+  def apply[F[_]: Async: Files](cfg: Config, store: Store[F]): Task[F, Args, Unit] =
     Task { ctx =>
       for {
         _ <- ctx.logger.info(s"Converting pdf file ${ctx.args} using ocrmypdf")
@@ -89,7 +90,7 @@ object PdfConvTask {
       else none.pure[F]
     }
 
-  def convert[F[_]: Async](
+  def convert[F[_]: Async: Files](
      cfg: Config,
      ctx: Context[F, Args],
      store: Store[F],
@@ -11,6 +11,7 @@ import cats.data.{Kleisli, OptionT}
 import cats.effect._
 import cats.implicits._
 import fs2.Stream
+import fs2.io.file.Files
 
 import docspell.common._
 import docspell.convert.ConversionResult.Handler
@@ -35,7 +36,7 @@ import docspell.store.records._
 object ConvertPdf {
   type Args = ProcessItemArgs
 
-  def apply[F[_]: Async](
+  def apply[F[_]: Async: Files](
       cfg: ConvertConfig,
       store: Store[F],
       item: ItemData
@@ -76,7 +77,7 @@ object ConvertPdf {
       .map(_.mimetype)
       .getOrElse(MimeType.octetStream)
 
-  def convertSafe[F[_]: Async](
+  def convertSafe[F[_]: Async: Files](
       cfg: ConvertConfig,
       sanitizeHtml: SanitizeHtml,
       ctx: Context[F, Args],
@@ -14,6 +14,7 @@ import cats.implicits._
 import cats.kernel.Monoid
 import cats.kernel.Order
 import fs2.Stream
+import fs2.io.file.Files
 
 import docspell.common._
 import docspell.common.util.Zip
@@ -35,12 +36,12 @@ import emil.Mail
 object ExtractArchive {
   type Args = ProcessItemArgs
 
-  def apply[F[_]: Async](store: Store[F])(
+  def apply[F[_]: Async: Files](store: Store[F])(
       item: ItemData
   ): Task[F, Args, ItemData] =
     multiPass(store, item, None).map(_._2)
 
-  def multiPass[F[_]: Async](
+  def multiPass[F[_]: Async: Files](
       store: Store[F],
       item: ItemData,
       archive: Option[RAttachmentArchive]
@@ -50,7 +51,7 @@ object ExtractArchive {
       else multiPass(store, t._2, t._1)
     }
 
-  def singlePass[F[_]: Async](
+  def singlePass[F[_]: Async: Files](
      store: Store[F],
      item: ItemData,
      archive: Option[RAttachmentArchive]
@@ -91,7 +92,7 @@ object ExtractArchive {
       .map(_.mimetype)
       .getOrElse(MimeType.octetStream)
 
-  def extractSafe[F[_]: Async](
+  def extractSafe[F[_]: Async: Files](
       ctx: Context[F, Args],
       store: Store[F],
       archive: Option[RAttachmentArchive]
@@ -137,7 +138,7 @@ object ExtractArchive {
     } yield extracted.copy(files = extracted.files.filter(_.id != ra.id))
   }
 
-  def extractZip[F[_]: Async](
+  def extractZip[F[_]: Async: Files](
      ctx: Context[F, Args],
      store: Store[F],
      archive: Option[RAttachmentArchive]
@@ -10,6 +10,7 @@ import cats.data.OptionT
 import cats.effect._
 import cats.implicits._
 import fs2.Stream
+import fs2.io.file.Files
 
 import docspell.analysis.TextAnalyser
 import docspell.backend.joex.AddonOps
@@ -36,7 +37,7 @@ object ItemHandler {
       }
     )
 
-  def newItem[F[_]: Async](
+  def newItem[F[_]: Async: Files](
       cfg: Config,
       store: Store[F],
       itemOps: OItem[F],
@@ -82,7 +83,7 @@ object ItemHandler {
   def isLastRetry[F[_]]: Task[F, Args, Boolean] =
     Task(_.isLastRetry)
 
-  def safeProcess[F[_]: Async](
+  def safeProcess[F[_]: Async: Files](
      cfg: Config,
      store: Store[F],
      itemOps: OItem[F],
@@ -8,6 +8,7 @@ package docspell.joex.process
 
 import cats.effect._
 import cats.implicits._
+import fs2.io.file.Files
 
 import docspell.addons.AddonTriggerType
 import docspell.analysis.TextAnalyser
@@ -22,7 +23,7 @@ import docspell.store.Store
 
 object ProcessItem {
 
-  def apply[F[_]: Async](
+  def apply[F[_]: Async: Files](
       cfg: Config,
       itemOps: OItem[F],
       fts: FtsClient[F],
@@ -40,7 +41,7 @@ object ProcessItem {
       .flatMap(RemoveEmptyItem(itemOps))
       .flatMap(RunAddons(addonOps, store, AddonTriggerType.FinalProcessItem))
 
-  def processAttachments[F[_]: Async](
+  def processAttachments[F[_]: Async: Files](
       cfg: Config,
       fts: FtsClient[F],
       analyser: TextAnalyser[F],
@@ -49,7 +50,7 @@ object ProcessItem {
   )(item: ItemData): Task[F, ProcessItemArgs, ItemData] =
     processAttachments0[F](cfg, fts, analyser, regexNer, store, (30, 60, 90))(item)
 
-  def analysisOnly[F[_]: Async](
+  def analysisOnly[F[_]: Async: Files](
       cfg: Config,
       analyser: TextAnalyser[F],
       regexNer: RegexNerFile[F],
@@ -61,7 +62,7 @@ object ProcessItem {
       .flatMap(CrossCheckProposals[F](store))
       .flatMap(SaveProposals[F](store))
 
-  private def processAttachments0[F[_]: Async](
+  private def processAttachments0[F[_]: Async: Files](
      cfg: Config,
      fts: FtsClient[F],
      analyser: TextAnalyser[F],
@@ -9,6 +9,7 @@ package docspell.joex.process
 import cats.data.OptionT
 import cats.effect._
 import cats.implicits._
+import fs2.io.file.Files
 
 import docspell.addons.AddonTriggerType
 import docspell.analysis.TextAnalyser
@@ -30,7 +31,7 @@ import docspell.store.records.RItem
 object ReProcessItem {
   type Args = ReProcessItemArgs
 
-  def apply[F[_]: Async](
+  def apply[F[_]: Async: Files](
       cfg: Config,
       fts: FtsClient[F],
       itemOps: OItem[F],
@@ -106,7 +107,7 @@ object ReProcessItem {
     )
   }
 
-  def processFiles[F[_]: Async](
+  def processFiles[F[_]: Async: Files](
       cfg: Config,
       fts: FtsClient[F],
       itemOps: OItem[F],
@@ -162,7 +163,7 @@ object ReProcessItem {
   def isLastRetry[F[_]]: Task[F, Args, Boolean] =
     Task(_.isLastRetry)
 
-  def safeProcess[F[_]: Async](
+  def safeProcess[F[_]: Async: Files](
      cfg: Config,
      fts: FtsClient[F],
      itemOps: OItem[F],
@@ -8,6 +8,7 @@ package docspell.joex.process
 
 import cats.effect._
 import cats.syntax.all._
+import fs2.io.file.Files
 
 import docspell.addons.AddonTriggerType
 import docspell.backend.joex.AddonOps
@@ -22,7 +23,7 @@ import docspell.store.Store
 object RunAddons {
   type Args = ProcessItemArgs
 
-  def apply[F[_]: Async](
+  def apply[F[_]: Async: Files](
       ops: AddonOps[F],
       store: Store[F],
       trigger: AddonTriggerType
@@ -9,6 +9,7 @@ package docspell.joex.process
 import cats.Traverse
 import cats.effect._
 import cats.implicits._
+import fs2.io.file.Files
 
 import docspell.analysis.classifier.TextClassifier
 import docspell.analysis.{NlpSettings, TextAnalyser}
@@ -26,7 +27,7 @@ import docspell.store.records.{RAttachmentMeta, RClassifierSetting}
 object TextAnalysis {
   type Args = ProcessItemArgs
 
-  def apply[F[_]: Async](
+  def apply[F[_]: Async: Files](
       cfg: Config.TextAnalysis,
       analyser: TextAnalyser[F],
       nerFile: RegexNerFile[F],
@@ -87,7 +88,7 @@ object TextAnalysis {
     } yield (rm.copy(nerlabels = labels.all.toList), AttachmentDates(rm, labels.dates))
   }
 
-  def predictTags[F[_]: Async](
+  def predictTags[F[_]: Async: Files](
       ctx: Context[F, Args],
       store: Store[F],
       cfg: Config.TextAnalysis,
@@ -107,7 +108,7 @@ object TextAnalysis {
     } yield tags.flatten
   }
 
-  def predictItemEntities[F[_]: Async](
+  def predictItemEntities[F[_]: Async: Files](
       ctx: Context[F, Args],
       store: Store[F],
       cfg: Config.TextAnalysis,
@@ -139,7 +140,7 @@ object TextAnalysis {
       .map(MetaProposalList.apply)
   }
 
-  private def makeClassify[F[_]: Async](
+  private def makeClassify[F[_]: Async: Files](
      ctx: Context[F, Args],
      store: Store[F],
      cfg: Config.TextAnalysis,
@@ -9,6 +9,7 @@ package docspell.joex.process
 import cats.data.OptionT
 import cats.effect._
 import cats.implicits._
+import fs2.io.file.Files
 
 import docspell.common._
 import docspell.extract.{ExtractConfig, ExtractResult, Extraction}
@@ -19,7 +20,7 @@ import docspell.store.records.{RAttachment, RAttachmentMeta, RFileMeta}
 
 object TextExtraction {
 
-  def apply[F[_]: Async](cfg: ExtractConfig, fts: FtsClient[F], store: Store[F])(
+  def apply[F[_]: Async: Files](cfg: ExtractConfig, fts: FtsClient[F], store: Store[F])(
       item: ItemData
   ): Task[F, ProcessItemArgs, ItemData] =
     Task { ctx =>
@@ -66,7 +67,7 @@ object TextExtraction {
 
   case class Result(am: RAttachmentMeta, td: TextData, tags: List[String] = Nil)
 
-  def extractTextIfEmpty[F[_]: Async](
+  def extractTextIfEmpty[F[_]: Async: Files](
       ctx: Context[F, ProcessItemArgs],
       store: Store[F],
       cfg: ExtractConfig,
@@ -100,7 +101,7 @@ object TextExtraction {
       }
     }
 
-  def extractTextToMeta[F[_]: Async](
+  def extractTextToMeta[F[_]: Async: Files](
       ctx: Context[F, _],
       store: Store[F],
       cfg: ExtractConfig,
@@ -143,7 +144,7 @@ object TextExtraction {
        .flatMap(mt => extr.extractText(data, DataType(mt), lang))
     }
 
-  private def extractTextFallback[F[_]: Async](
+  private def extractTextFallback[F[_]: Async: Files](
      ctx: Context[F, _],
      store: Store[F],
      cfg: ExtractConfig,
@@ -8,6 +8,7 @@ package docspell.joexapi.client
 
 import cats.effect._
 import cats.implicits._
+import fs2.io.net.Network
 
 import docspell.common.{Ident, LenientUri}
 import docspell.joexapi.model.{AddonSupport, BasicResult}
@@ -72,6 +73,6 @@ object JoexClient {
       Uri.unsafeFromString(u.asString)
   }
 
-  def resource[F[_]: Async]: Resource[F, JoexClient[F]] =
+  def resource[F[_]: Async: Network]: Resource[F, JoexClient[F]] =
     EmberClientBuilder.default[F].build.map(apply[F])
 }
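The JoexClient hunk above differs from the others: it needs the socket capability rather than the file-system one, because the Ember builders in http4s 0.23.23 require an explicit `Network[F]` alongside `Async[F]`. A hedged sketch of the same propagation pattern for an HTTP client; `NetworkCapabilityDemo` and `mkClient` are illustrative names, not part of this change:

import cats.effect.{Async, IO, Resource}
import fs2.io.net.Network
import org.http4s.client.Client
import org.http4s.ember.client.EmberClientBuilder

object NetworkCapabilityDemo {
  // The Network bound travels with polymorphic callers,
  // exactly as the Files bound does in the joex modules.
  def mkClient[F[_]: Async: Network]: Resource[F, Client[F]] =
    EmberClientBuilder.default[F].build

  // A concrete effect such as IO provides Network implicitly.
  val clientForIO: Resource[IO, Client[IO]] = mkClient[IO]
}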
@@ -10,6 +10,7 @@ import java.security.SecureRandom
 
 import cats.Monoid
 import cats.effect.Async
+import fs2.io.file.Files
 
 import docspell.backend.auth.Login
 import docspell.backend.signup.{Config => SignupConfig}
@@ -30,7 +31,7 @@ object ConfigFile {
   // IntelliJ is wrong, this is required
   import Implicits._
 
-  def loadConfig[F[_]: Async](args: List[String]): F[Config] = {
+  def loadConfig[F[_]: Async: Files](args: List[String]): F[Config] = {
     val logger = docspell.logging.getLogger[F]
     val validate =
       Validation.of(
@@ -9,6 +9,7 @@ package docspell.restserver
 import cats.effect._
 import fs2.Stream
 import fs2.concurrent.Topic
+import fs2.io.file.Files
 
 import docspell.backend.BackendApp
 import docspell.backend.auth.{AuthToken, ShareToken}
@@ -36,7 +37,7 @@ import org.http4s.client.Client
 import org.http4s.server.Router
 import org.http4s.server.websocket.WebSocketBuilder2
 
-final class RestAppImpl[F[_]: Async](
+final class RestAppImpl[F[_]: Async: Files](
     val config: Config,
     val backend: BackendApp[F],
     httpClient: Client[F],
@@ -162,7 +163,7 @@ final class RestAppImpl[F[_]: Async](
 
 object RestAppImpl {
 
-  def create[F[_]: Async](
+  def create[F[_]: Async: Files](
      cfg: Config,
      pools: Pools,
      store: Store[F],
@@ -12,6 +12,8 @@ import cats.effect._
 import cats.implicits._
 import fs2.Stream
 import fs2.concurrent.Topic
+import fs2.io.file.Files
+import fs2.io.net.Network
 
 import docspell.backend.msg.Topics
 import docspell.backend.ops.ONode
@@ -35,7 +37,7 @@ import org.http4s.server.websocket.WebSocketBuilder2
 
 object RestServer {
 
-  def serve[F[_]: Async](
+  def serve[F[_]: Async: Files: Network](
       cfg: Config,
       pools: Pools
   ): F[ExitCode] =
@@ -55,7 +57,7 @@ object RestServer {
       .flatMap { case (restApp, pubSub, setting) =>
         Stream(
           restApp.subscriptions,
-          restApp.eventConsume(2),
+          restApp.eventConsume(maxConcurrent = 2),
          Stream.resource {
            if (cfg.serverOptions.enableHttp2)
              EmberServerBuilder
@@ -81,7 +83,7 @@ object RestServer {
       (server ++ Stream(keepAlive)).parJoinUnbounded.compile.drain.as(ExitCode.Success)
     } yield exit
 
-  def createApp[F[_]: Async](
+  def createApp[F[_]: Async: Files: Network](
      cfg: Config,
      pools: Pools,
      wsTopic: Topic[F, OutputEvent]
@@ -9,6 +9,7 @@ package docspell.restserver.routes
 import cats.data.OptionT
 import cats.effect._
 import cats.implicits._
+import fs2.io.file.Files
 
 import docspell.backend.BackendApp
 import docspell.backend.auth.AuthToken
@@ -25,7 +26,7 @@ import org.log4s._
 object UploadRoutes {
   private[this] val logger = getLogger
 
-  def secured[F[_]: Async](
+  def secured[F[_]: Async: Files](
       backend: BackendApp[F],
       cfg: Config,
       user: AuthToken
@@ -50,7 +51,7 @@ object UploadRoutes {
       }
     }
 
-  def open[F[_]: Async](backend: BackendApp[F], cfg: Config): HttpRoutes[F] = {
+  def open[F[_]: Async: Files](backend: BackendApp[F], cfg: Config): HttpRoutes[F] = {
     val dsl = new Http4sDsl[F] with ResponseGenerator[F] {}
     import dsl._
 
@@ -78,7 +79,7 @@
       }
     }
 
-  private def submitFiles[F[_]: Async](
+  private def submitFiles[F[_]: Async: Files](
      backend: BackendApp[F],
      cfg: Config,
      accOrSrc: Either[Ident, CollectiveId],
@@ -20,9 +20,9 @@ object Dependencies {
   val EmilVersion = "0.13.0"
   val FlexmarkVersion = "0.64.8"
   val FlywayVersion = "9.22.3"
-  val Fs2Version = "3.6.1"
+  val Fs2Version = "3.9.2"
   val H2Version = "2.2.224"
-  val Http4sVersion = "0.23.18"
+  val Http4sVersion = "0.23.23"
   val Icu4jVersion = "74.1"
   val JavaOtpVersion = "0.4.0"
   val JsoupVersion = "1.16.2"
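These two version bumps are what force the capability constraints added throughout the diff: the newer fs2 stops deriving `Files[F]` (and, via http4s 0.23.23's Ember builders, `Network[F]`) from a bare `Async[F]` for generic effect types. For reference, a sketch of how such pins are typically consumed in the module lists of project/Dependencies.scala; the val names below are illustrative and may not match the ones used in this repository:

  val fs2 = Seq(
    "co.fs2" %% "fs2-core" % Fs2Version,
    "co.fs2" %% "fs2-io" % Fs2Version
  )

  val http4sEmberServer = Seq(
    "org.http4s" %% "http4s-ember-server" % Http4sVersion
  )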