Updated the following dependencies, as they require code changes to work properly:

- Scala
- fs2
- http4s
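Nearly every hunk below makes the same change: in fs2 3.x (and the http4s versions built on it), file-system and socket operations live behind the capability traits `fs2.io.file.Files` and `fs2.io.net.Network`, so effect-polymorphic code must carry these as context bounds alongside `Async` rather than getting the functionality from `Async` alone. A minimal sketch of the pattern, assuming current fs2 3.x APIs (the object and method names here are illustrative, not part of this commit):

```scala
import cats.effect.{Async, IO}
import fs2.io.file.{Files, Path}

object CapabilitySketch {
  // fs2 2.x style: an Async (or Sync) constraint alone was enough for
  // file access, so signatures read `def f[F[_]: Async](...)`.

  // fs2 3.x style: file access needs the explicit Files capability.
  def fileSize[F[_]: Async: Files](path: Path): F[Long] =
    Files[F].size(path)

  // Concrete effect types such as IO resolve Files[IO] from the
  // Async-based instance, so only the polymorphic layer changes.
  val size: IO[Long] = fileSize[IO](Path("build.sbt"))
}
```

The named-argument tweaks in the diff (e.g. `maxConcurrent = 2`, `chunkSize = 50`) are incidental readability changes picked up while touching these files.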
@@ -7,6 +7,7 @@
 package docspell.joex
 
 import cats.effect.Async
+import fs2.io.file.Files
 
 import docspell.config.Implicits._
 import docspell.config.{ConfigFactory, FtsType, Validation}
@@ -25,7 +26,7 @@ object ConfigFile {
   // IntelliJ is wrong, this is required
   import Implicits._
 
-  def loadConfig[F[_]: Async](args: List[String]): F[Config] = {
+  def loadConfig[F[_]: Async: Files](args: List[String]): F[Config] = {
     val logger = docspell.logging.getLogger[F]
     ConfigFactory
       .default[F, Config](logger, "docspell.joex")(args, validate)
@@ -9,6 +9,8 @@ package docspell.joex
 import cats.effect._
 import cats.implicits._
 import fs2.concurrent.SignallingRef
+import fs2.io.file.Files
+import fs2.io.net.Network
 
 import docspell.backend.MailAddressCodec
 import docspell.backend.joex.FindJobOwnerAccount
@@ -45,7 +47,7 @@ final class JoexAppImpl[F[_]: Async](
   def init: F[Unit] = {
     val run = scheduler.start.compile.drain
     val prun = periodicScheduler.start.compile.drain
-    val eventConsume = notificationMod.consumeAllEvents(2).compile.drain
+    val eventConsume = notificationMod.consumeAllEvents(maxConcurrent = 2).compile.drain
     for {
       _ <- scheduleBackgroundTasks
       _ <- Async[F].start(run)
@@ -62,7 +64,9 @@ final class JoexAppImpl[F[_]: Async](
     store.transact(RJobLog.findLogs(jobId))
 
   def initShutdown: F[Unit] =
-    periodicScheduler.shutdown *> scheduler.shutdown(false) *> termSignal.set(true)
+    periodicScheduler.shutdown *> scheduler.shutdown(cancelAll = false) *> termSignal.set(
+      true
+    )
 
   private def scheduleBackgroundTasks: F[Unit] =
     HouseKeepingTask
@@ -81,7 +85,8 @@ final class JoexAppImpl[F[_]: Async](
   private def scheduleEmptyTrashTasks: F[Unit] =
     store
       .transact(
-        REmptyTrashSetting.findForAllCollectives(OCollective.EmptyTrash.default, 50)
+        REmptyTrashSetting
+          .findForAllCollectives(OCollective.EmptyTrash.default, chunkSize = 50)
       )
       .evalMap { es =>
         val args = EmptyTrashArgs(es.cid, es.minAge)
@@ -98,7 +103,7 @@ final class JoexAppImpl[F[_]: Async](
 
 object JoexAppImpl extends MailAddressCodec {
 
-  def create[F[_]: Async](
+  def create[F[_]: Async: Files: Network](
       cfg: Config,
       termSignal: SignallingRef[F, Boolean],
       store: Store[F],
@@ -107,12 +112,14 @@ object JoexAppImpl extends MailAddressCodec {
       pools: Pools
   ): Resource[F, JoexApp[F]] =
     for {
-      joexLogger <- Resource.pure(docspell.logging.getLogger[F](s"joex-${cfg.appId.id}"))
+      joexLogger <- Resource.pure(
+        docspell.logging.getLogger[F](name = s"joex-${cfg.appId.id}")
+      )
       pubSubT = PubSubT(pubSub, joexLogger)
       javaEmil =
         JavaMailEmil(Settings.defaultSettings.copy(debug = cfg.mailDebug))
       notificationMod <- Resource.eval(
-        NotificationModuleImpl[F](store, javaEmil, httpClient, 200)
+        NotificationModuleImpl[F](store, javaEmil, httpClient, queueSize = 200)
       )
 
       jobStoreModule = JobStoreModuleBuilder(store)
@@ -9,6 +9,8 @@ package docspell.joex
 import cats.effect._
 import fs2.Stream
 import fs2.concurrent.SignallingRef
+import fs2.io.file.Files
+import fs2.io.net.Network
 
 import docspell.backend.msg.Topics
 import docspell.common.Pools
@@ -32,7 +34,10 @@ object JoexServer {
       exitRef: Ref[F, ExitCode]
   )
 
-  def stream[F[_]: Async](cfg: Config, pools: Pools): Stream[F, Nothing] = {
+  def stream[F[_]: Async: Files: Network](
+      cfg: Config,
+      pools: Pools
+  ): Stream[F, Nothing] = {
 
     val app = for {
       signal <- Resource.eval(SignallingRef[F, Boolean](false))
@@ -7,6 +7,8 @@
 package docspell.joex
 
 import cats.effect.{Async, Resource}
+import fs2.io.file.Files
+import fs2.io.net.Network
 
 import docspell.analysis.TextAnalyser
 import docspell.backend.BackendCommands
@@ -46,7 +48,7 @@ import docspell.store.Store
 import emil.Emil
 import org.http4s.client.Client
 
-final class JoexTasks[F[_]: Async](
+final class JoexTasks[F[_]: Async: Files: Network](
     cfg: Config,
     store: Store[F],
     itemOps: OItem[F],
@@ -257,7 +259,7 @@ final class JoexTasks[F[_]: Async](
 
 object JoexTasks {
 
-  def resource[F[_]: Async](
+  def resource[F[_]: Async: Files: Network](
      cfg: Config,
      pools: Pools,
      jobStoreModule: JobStoreModuleBuilder.Module[F],
@@ -43,7 +43,7 @@ object GenericItemAddonTask extends LoggerExtension {
     "ITEM_PDF_JSON" -> pdfMetaJson
   )
 
-  def apply[F[_]: Async](
+  def apply[F[_]: Async: Files](
      ops: AddonOps[F],
      store: Store[F],
      trigger: AddonTriggerType,
@@ -57,7 +57,7 @@ object GenericItemAddonTask extends LoggerExtension {
      data
    )
 
-  def addonResult[F[_]: Async](
+  def addonResult[F[_]: Async: Files](
      ops: AddonOps[F],
      store: Store[F],
      trigger: AddonTriggerType,
@@ -73,7 +73,7 @@ object GenericItemAddonTask extends LoggerExtension {
    )
  }
 
-  def prepareItemData[F[_]: Async](
+  def prepareItemData[F[_]: Async: Files](
      logger: Logger[F],
      store: Store[F],
      data: ItemData,
@@ -9,6 +9,7 @@ package docspell.joex.addon
 import cats.data.OptionT
 import cats.effect._
 import cats.syntax.all._
+import fs2.io.file.Files
 
 import docspell.addons.AddonTriggerType
 import docspell.backend.joex.AddonOps
@@ -26,7 +27,10 @@ object ItemAddonTask extends AddonTaskExtension {
   def onCancel[F[_]]: Task[F, Args, Unit] =
     Task.log(_.warn(s"Cancelling ${name.id} task"))
 
-  def apply[F[_]: Async](ops: AddonOps[F], store: Store[F]): Task[F, Args, Result] =
+  def apply[F[_]: Async: Files](
+      ops: AddonOps[F],
+      store: Store[F]
+  ): Task[F, Args, Result] =
     Task { ctx =>
       (for {
         item <- OptionT(
@@ -8,7 +8,7 @@ package docspell.joex.analysis
 
 import cats.effect._
 import cats.implicits._
-import fs2.io.file.Path
+import fs2.io.file.{Files, Path}
 
 import docspell.analysis.split.TextSplitter
 import docspell.common._
@@ -39,7 +39,7 @@ object NerFile {
   private def jsonFilePath(directory: Path, collective: CollectiveId): Path =
     directory.resolve(s"${collective.value}.json")
 
-  def find[F[_]: Async](
+  def find[F[_]: Async: Files](
      collective: CollectiveId,
      directory: Path
  ): F[Option[NerFile]] = {
@@ -9,7 +9,7 @@ package docspell.joex.analysis
 import cats.effect._
 import cats.effect.std.Semaphore
 import cats.implicits._
-import fs2.io.file.Path
+import fs2.io.file.{Files, Path}
 
 import docspell.common._
 import docspell.common.util.File
@@ -32,7 +32,7 @@ object RegexNerFile {
 
   case class Config(maxEntries: Int, directory: Path, minTime: Duration)
 
-  def apply[F[_]: Async](
+  def apply[F[_]: Async: Files](
      cfg: Config,
      store: Store[F]
  ): Resource[F, RegexNerFile[F]] =
@@ -41,7 +41,7 @@ object RegexNerFile {
       writer <- Resource.eval(Semaphore(1))
     } yield new Impl[F](cfg.copy(directory = dir), store, writer)
 
-  final private class Impl[F[_]: Async](
+  final private class Impl[F[_]: Async: Files](
      cfg: Config,
      store: Store[F],
      writer: Semaphore[F] // TODO allow parallelism per collective
@@ -10,6 +10,7 @@ import java.time.format.DateTimeFormatter
 
 import cats.effect._
 import cats.syntax.all._
+import fs2.io.file.Files
 import fs2.{Pipe, Stream}
 
 import docspell.backend.ops.ODownloadAll
@@ -28,7 +29,7 @@ object DownloadZipTask {
   def onCancel[F[_]]: Task[F, Args, Unit] =
     Task.log(_.warn(s"Cancelling ${DownloadZipArgs.taskName.id} task"))
 
-  def apply[F[_]: Async](
+  def apply[F[_]: Async: Files](
      chunkSize: Int,
      store: Store[F],
      downloadOps: ODownloadAll[F]
@@ -8,6 +8,7 @@ package docspell.joex.hk
 
 import cats.effect._
 import cats.implicits._
+import fs2.io.net.Network
 
 import docspell.common._
 import docspell.logging.Logger
@@ -19,7 +20,7 @@ import org.http4s.client.Client
 import org.http4s.ember.client.EmberClientBuilder
 
 object CheckNodesTask {
-  def apply[F[_]: Async](
+  def apply[F[_]: Async: Network](
      cfg: HouseKeepingConfig.CheckNodes,
      store: Store[F]
  ): Task[F, Unit, CleanupResult] =
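The `Network` bound added here pairs with the `EmberClientBuilder` import above: recent http4s-ember versions ask for an explicit `Network[F]` capability, in addition to `Async[F]`, when building a client. A sketch of such a call site, assuming http4s 0.23.x (the helper name is illustrative, not from this commit):

```scala
import cats.effect.{Async, Resource}
import fs2.io.net.Network
import org.http4s.client.Client
import org.http4s.ember.client.EmberClientBuilder

// Illustrative helper: the Network[F] bound is what
// EmberClientBuilder.default demands alongside Async[F].
def mkHttpClient[F[_]: Async: Network]: Resource[F, Client[F]] =
  EmberClientBuilder.default[F].build
```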
@@ -8,6 +8,7 @@ package docspell.joex.hk
 
 import cats.effect._
 import cats.implicits._
+import fs2.io.net.Network
 
 import docspell.backend.ops.{ODownloadAll, OFileRepository}
 import docspell.common._
@@ -26,7 +27,7 @@ object HouseKeepingTask {
 
   val taskName: Ident = Ident.unsafe("housekeeping")
 
-  def apply[F[_]: Async](
+  def apply[F[_]: Async: Network](
      cfg: Config,
      store: Store[F],
      fileRepo: OFileRepository[F],
@@ -21,7 +21,7 @@ import docspell.store.records.RClassifierModel
 
 object Classify {
 
-  def apply[F[_]: Async](
+  def apply[F[_]: Async: Files](
      logger: Logger[F],
      workingDir: Path,
      store: Store[F],
@@ -9,6 +9,7 @@ package docspell.joex.learn
 import cats.data.OptionT
 import cats.effect._
 import cats.implicits._
+import fs2.io.file.Files
 
 import docspell.analysis.TextAnalyser
 import docspell.backend.ops.OCollective
@@ -28,7 +29,7 @@ object LearnClassifierTask {
   def onCancel[F[_]]: Task[F, Args, Unit] =
     Task.log(_.warn("Cancelling learn-classifier task"))
 
-  def apply[F[_]: Async](
+  def apply[F[_]: Async: Files](
      cfg: Config.TextAnalysis,
      store: Store[F],
      analyser: TextAnalyser[F]
@@ -37,7 +38,7 @@ object LearnClassifierTask {
       .flatMap(_ => learnItemEntities(cfg, store, analyser))
       .flatMap(_ => Task(_ => Sync[F].delay(System.gc())))
 
-  private def learnItemEntities[F[_]: Async](
+  private def learnItemEntities[F[_]: Async: Files](
      cfg: Config.TextAnalysis,
      store: Store[F],
      analyser: TextAnalyser[F]
@@ -56,7 +57,7 @@ object LearnClassifierTask {
       else ().pure[F]
     }
 
-  private def learnTags[F[_]: Async](
+  private def learnTags[F[_]: Async: Files](
      cfg: Config.TextAnalysis,
      store: Store[F],
      analyser: TextAnalyser[F]
@@ -10,6 +10,7 @@ import cats.data.Kleisli
 import cats.effect._
 import cats.implicits._
 import fs2.Stream
+import fs2.io.file.Files
 
 import docspell.analysis.TextAnalyser
 import docspell.analysis.classifier.TextClassifier.Data
@@ -18,7 +19,7 @@ import docspell.scheduler._
 import docspell.store.Store
 
 object LearnItemEntities {
-  def learnAll[F[_]: Async, A](
+  def learnAll[F[_]: Async: Files, A](
      analyser: TextAnalyser[F],
      store: Store[F],
      collective: CollectiveId,
@@ -32,7 +33,7 @@ object LearnItemEntities {
       .flatMap(_ => learnConcPerson(analyser, store, collective, maxItems, maxTextLen))
       .flatMap(_ => learnConcEquip(analyser, store, collective, maxItems, maxTextLen))
 
-  def learnCorrOrg[F[_]: Async, A](
+  def learnCorrOrg[F[_]: Async: Files, A](
      analyser: TextAnalyser[F],
      store: Store[F],
      collective: CollectiveId,
@@ -44,7 +45,7 @@ object LearnItemEntities {
       _ => SelectItems.forCorrOrg(store, collective, maxItems, maxTextLen)
     )
 
-  def learnCorrPerson[F[_]: Async, A](
+  def learnCorrPerson[F[_]: Async: Files, A](
      analyser: TextAnalyser[F],
      store: Store[F],
      collective: CollectiveId,
@@ -56,7 +57,7 @@ object LearnItemEntities {
       _ => SelectItems.forCorrPerson(store, collective, maxItems, maxTextLen)
     )
 
-  def learnConcPerson[F[_]: Async, A](
+  def learnConcPerson[F[_]: Async: Files, A](
      analyser: TextAnalyser[F],
      store: Store[F],
      collective: CollectiveId,
@@ -68,7 +69,7 @@ object LearnItemEntities {
       _ => SelectItems.forConcPerson(store, collective, maxItems, maxTextLen)
     )
 
-  def learnConcEquip[F[_]: Async, A](
+  def learnConcEquip[F[_]: Async: Files, A](
      analyser: TextAnalyser[F],
      store: Store[F],
      collective: CollectiveId,
@@ -80,7 +81,7 @@ object LearnItemEntities {
       _ => SelectItems.forConcEquip(store, collective, maxItems, maxTextLen)
     )
 
-  private def learn[F[_]: Async, A](
+  private def learn[F[_]: Async: Files, A](
      store: Store[F],
      analyser: TextAnalyser[F],
      collective: CollectiveId
@@ -9,6 +9,7 @@ package docspell.joex.learn
 import cats.data.Kleisli
 import cats.effect._
 import cats.implicits._
+import fs2.io.file.Files
 
 import docspell.analysis.TextAnalyser
 import docspell.common._
@@ -18,7 +19,7 @@ import docspell.store.records.RClassifierSetting
 
 object LearnTags {
 
-  def learnTagCategory[F[_]: Async, A](
+  def learnTagCategory[F[_]: Async: Files, A](
      analyser: TextAnalyser[F],
      store: Store[F],
      collective: CollectiveId,
@@ -43,7 +44,10 @@ object LearnTags {
      )
    }
 
-  def learnAllTagCategories[F[_]: Async, A](analyser: TextAnalyser[F], store: Store[F])(
+  def learnAllTagCategories[F[_]: Async: Files, A](
+      analyser: TextAnalyser[F],
+      store: Store[F]
+  )(
      collective: CollectiveId,
      maxItems: Int,
      maxTextLen: Int
@@ -18,7 +18,7 @@ import docspell.store.records.RClassifierModel
 
 object StoreClassifierModel {
 
-  def handleModel[F[_]: Async](
+  def handleModel[F[_]: Async: Files](
      store: Store[F],
      logger: Logger[F],
      collective: CollectiveId,
@@ -11,6 +11,7 @@ import cats.data.OptionT
 import cats.effect._
 import cats.implicits._
 import fs2.Stream
+import fs2.io.file.Files
 
 import docspell.backend.JobFactory
 import docspell.common._
@@ -35,7 +36,10 @@ import docspell.store.Store
 object MultiUploadArchiveTask {
   type Args = ProcessItemArgs
 
-  def apply[F[_]: Async](store: Store[F], jobStore: JobStore[F]): Task[F, Args, Result] =
+  def apply[F[_]: Async: Files](
+      store: Store[F],
+      jobStore: JobStore[F]
+  ): Task[F, Args, Result] =
     Task { ctx =>
       ctx.args.files
         .traverse { file =>
@@ -104,7 +108,7 @@ object MultiUploadArchiveTask {
       .map(_.mimetype.matches(MimeType.zip))
       .getOrElse(false)
 
-  private def extractZip[F[_]: Async](
+  private def extractZip[F[_]: Async: Files](
      store: Store[F],
      args: Args
  )(file: ProcessItemArgs.File): Stream[F, ProcessItemArgs] =
@@ -11,6 +11,7 @@ import cats.data.OptionT
 import cats.effect._
 import cats.implicits._
 import fs2.Stream
+import fs2.io.file.Files
 
 import docspell.common._
 import docspell.convert.ConversionResult
@@ -35,9 +36,9 @@ object PdfConvTask {
     deriveEncoder[Args]
   }
 
-  val taskName = Ident.unsafe("pdf-files-migration")
+  val taskName: Ident = Ident.unsafe("pdf-files-migration")
 
-  def apply[F[_]: Async](cfg: Config, store: Store[F]): Task[F, Args, Unit] =
+  def apply[F[_]: Async: Files](cfg: Config, store: Store[F]): Task[F, Args, Unit] =
     Task { ctx =>
       for {
         _ <- ctx.logger.info(s"Converting pdf file ${ctx.args} using ocrmypdf")
@@ -89,7 +90,7 @@ object PdfConvTask {
       else none.pure[F]
     }
 
-  def convert[F[_]: Async](
+  def convert[F[_]: Async: Files](
      cfg: Config,
      ctx: Context[F, Args],
      store: Store[F],
@@ -11,6 +11,7 @@ import cats.data.{Kleisli, OptionT}
 import cats.effect._
 import cats.implicits._
 import fs2.Stream
+import fs2.io.file.Files
 
 import docspell.common._
 import docspell.convert.ConversionResult.Handler
@@ -35,7 +36,7 @@ import docspell.store.records._
 object ConvertPdf {
   type Args = ProcessItemArgs
 
-  def apply[F[_]: Async](
+  def apply[F[_]: Async: Files](
      cfg: ConvertConfig,
      store: Store[F],
      item: ItemData
@@ -76,7 +77,7 @@ object ConvertPdf {
       .map(_.mimetype)
       .getOrElse(MimeType.octetStream)
 
-  def convertSafe[F[_]: Async](
+  def convertSafe[F[_]: Async: Files](
      cfg: ConvertConfig,
      sanitizeHtml: SanitizeHtml,
      ctx: Context[F, Args],
@@ -14,6 +14,7 @@ import cats.implicits._
 import cats.kernel.Monoid
 import cats.kernel.Order
 import fs2.Stream
+import fs2.io.file.Files
 
 import docspell.common._
 import docspell.common.util.Zip
@@ -35,12 +36,12 @@ import emil.Mail
 object ExtractArchive {
   type Args = ProcessItemArgs
 
-  def apply[F[_]: Async](store: Store[F])(
+  def apply[F[_]: Async: Files](store: Store[F])(
      item: ItemData
  ): Task[F, Args, ItemData] =
     multiPass(store, item, None).map(_._2)
 
-  def multiPass[F[_]: Async](
+  def multiPass[F[_]: Async: Files](
      store: Store[F],
      item: ItemData,
      archive: Option[RAttachmentArchive]
@@ -50,7 +51,7 @@ object ExtractArchive {
       else multiPass(store, t._2, t._1)
     }
 
-  def singlePass[F[_]: Async](
+  def singlePass[F[_]: Async: Files](
      store: Store[F],
      item: ItemData,
      archive: Option[RAttachmentArchive]
@@ -91,7 +92,7 @@ object ExtractArchive {
       .map(_.mimetype)
       .getOrElse(MimeType.octetStream)
 
-  def extractSafe[F[_]: Async](
+  def extractSafe[F[_]: Async: Files](
      ctx: Context[F, Args],
      store: Store[F],
      archive: Option[RAttachmentArchive]
@@ -137,7 +138,7 @@ object ExtractArchive {
     } yield extracted.copy(files = extracted.files.filter(_.id != ra.id))
   }
 
-  def extractZip[F[_]: Async](
+  def extractZip[F[_]: Async: Files](
      ctx: Context[F, Args],
      store: Store[F],
      archive: Option[RAttachmentArchive]
@@ -10,6 +10,7 @@ import cats.data.OptionT
 import cats.effect._
 import cats.implicits._
 import fs2.Stream
+import fs2.io.file.Files
 
 import docspell.analysis.TextAnalyser
 import docspell.backend.joex.AddonOps
@@ -36,7 +37,7 @@ object ItemHandler {
       }
     )
 
-  def newItem[F[_]: Async](
+  def newItem[F[_]: Async: Files](
      cfg: Config,
      store: Store[F],
      itemOps: OItem[F],
@@ -82,7 +83,7 @@ object ItemHandler {
   def isLastRetry[F[_]]: Task[F, Args, Boolean] =
     Task(_.isLastRetry)
 
-  def safeProcess[F[_]: Async](
+  def safeProcess[F[_]: Async: Files](
      cfg: Config,
      store: Store[F],
      itemOps: OItem[F],
@@ -8,6 +8,7 @@ package docspell.joex.process
 
 import cats.effect._
 import cats.implicits._
+import fs2.io.file.Files
 
 import docspell.addons.AddonTriggerType
 import docspell.analysis.TextAnalyser
@@ -22,7 +23,7 @@ import docspell.store.Store
 
 object ProcessItem {
 
-  def apply[F[_]: Async](
+  def apply[F[_]: Async: Files](
      cfg: Config,
      itemOps: OItem[F],
      fts: FtsClient[F],
@@ -40,7 +41,7 @@ object ProcessItem {
       .flatMap(RemoveEmptyItem(itemOps))
       .flatMap(RunAddons(addonOps, store, AddonTriggerType.FinalProcessItem))
 
-  def processAttachments[F[_]: Async](
+  def processAttachments[F[_]: Async: Files](
      cfg: Config,
      fts: FtsClient[F],
      analyser: TextAnalyser[F],
@@ -49,7 +50,7 @@ object ProcessItem {
  )(item: ItemData): Task[F, ProcessItemArgs, ItemData] =
     processAttachments0[F](cfg, fts, analyser, regexNer, store, (30, 60, 90))(item)
 
-  def analysisOnly[F[_]: Async](
+  def analysisOnly[F[_]: Async: Files](
      cfg: Config,
      analyser: TextAnalyser[F],
      regexNer: RegexNerFile[F],
@@ -61,7 +62,7 @@ object ProcessItem {
       .flatMap(CrossCheckProposals[F](store))
       .flatMap(SaveProposals[F](store))
 
-  private def processAttachments0[F[_]: Async](
+  private def processAttachments0[F[_]: Async: Files](
      cfg: Config,
      fts: FtsClient[F],
      analyser: TextAnalyser[F],
@@ -9,6 +9,7 @@ package docspell.joex.process
 import cats.data.OptionT
 import cats.effect._
 import cats.implicits._
+import fs2.io.file.Files
 
 import docspell.addons.AddonTriggerType
 import docspell.analysis.TextAnalyser
@@ -30,7 +31,7 @@ import docspell.store.records.RItem
 object ReProcessItem {
   type Args = ReProcessItemArgs
 
-  def apply[F[_]: Async](
+  def apply[F[_]: Async: Files](
      cfg: Config,
      fts: FtsClient[F],
      itemOps: OItem[F],
@@ -106,7 +107,7 @@ object ReProcessItem {
    )
  }
 
-  def processFiles[F[_]: Async](
+  def processFiles[F[_]: Async: Files](
      cfg: Config,
      fts: FtsClient[F],
      itemOps: OItem[F],
@@ -162,7 +163,7 @@ object ReProcessItem {
   def isLastRetry[F[_]]: Task[F, Args, Boolean] =
     Task(_.isLastRetry)
 
-  def safeProcess[F[_]: Async](
+  def safeProcess[F[_]: Async: Files](
      cfg: Config,
      fts: FtsClient[F],
      itemOps: OItem[F],
@@ -8,6 +8,7 @@ package docspell.joex.process
 
 import cats.effect._
 import cats.syntax.all._
+import fs2.io.file.Files
 
 import docspell.addons.AddonTriggerType
 import docspell.backend.joex.AddonOps
@@ -22,7 +23,7 @@ import docspell.store.Store
 object RunAddons {
   type Args = ProcessItemArgs
 
-  def apply[F[_]: Async](
+  def apply[F[_]: Async: Files](
      ops: AddonOps[F],
      store: Store[F],
      trigger: AddonTriggerType
@@ -9,6 +9,7 @@ package docspell.joex.process
 import cats.Traverse
 import cats.effect._
 import cats.implicits._
+import fs2.io.file.Files
 
 import docspell.analysis.classifier.TextClassifier
 import docspell.analysis.{NlpSettings, TextAnalyser}
@@ -26,7 +27,7 @@ import docspell.store.records.{RAttachmentMeta, RClassifierSetting}
 object TextAnalysis {
   type Args = ProcessItemArgs
 
-  def apply[F[_]: Async](
+  def apply[F[_]: Async: Files](
      cfg: Config.TextAnalysis,
      analyser: TextAnalyser[F],
      nerFile: RegexNerFile[F],
@@ -87,7 +88,7 @@ object TextAnalysis {
     } yield (rm.copy(nerlabels = labels.all.toList), AttachmentDates(rm, labels.dates))
   }
 
-  def predictTags[F[_]: Async](
+  def predictTags[F[_]: Async: Files](
      ctx: Context[F, Args],
      store: Store[F],
      cfg: Config.TextAnalysis,
@@ -107,7 +108,7 @@ object TextAnalysis {
     } yield tags.flatten
   }
 
-  def predictItemEntities[F[_]: Async](
+  def predictItemEntities[F[_]: Async: Files](
      ctx: Context[F, Args],
      store: Store[F],
      cfg: Config.TextAnalysis,
@@ -139,7 +140,7 @@ object TextAnalysis {
       .map(MetaProposalList.apply)
   }
 
-  private def makeClassify[F[_]: Async](
+  private def makeClassify[F[_]: Async: Files](
      ctx: Context[F, Args],
      store: Store[F],
      cfg: Config.TextAnalysis,
@@ -9,6 +9,7 @@ package docspell.joex.process
 import cats.data.OptionT
 import cats.effect._
 import cats.implicits._
+import fs2.io.file.Files
 
 import docspell.common._
 import docspell.extract.{ExtractConfig, ExtractResult, Extraction}
@@ -19,7 +20,7 @@ import docspell.store.records.{RAttachment, RAttachmentMeta, RFileMeta}
 
 object TextExtraction {
 
-  def apply[F[_]: Async](cfg: ExtractConfig, fts: FtsClient[F], store: Store[F])(
+  def apply[F[_]: Async: Files](cfg: ExtractConfig, fts: FtsClient[F], store: Store[F])(
      item: ItemData
  ): Task[F, ProcessItemArgs, ItemData] =
     Task { ctx =>
@@ -66,7 +67,7 @@ object TextExtraction {
 
   case class Result(am: RAttachmentMeta, td: TextData, tags: List[String] = Nil)
 
-  def extractTextIfEmpty[F[_]: Async](
+  def extractTextIfEmpty[F[_]: Async: Files](
      ctx: Context[F, ProcessItemArgs],
      store: Store[F],
      cfg: ExtractConfig,
@@ -100,7 +101,7 @@ object TextExtraction {
     }
   }
 
-  def extractTextToMeta[F[_]: Async](
+  def extractTextToMeta[F[_]: Async: Files](
      ctx: Context[F, _],
      store: Store[F],
      cfg: ExtractConfig,
@@ -143,7 +144,7 @@ object TextExtraction {
       .flatMap(mt => extr.extractText(data, DataType(mt), lang))
   }
 
-  private def extractTextFallback[F[_]: Async](
+  private def extractTextFallback[F[_]: Async: Files](
      ctx: Context[F, _],
      store: Store[F],
      cfg: ExtractConfig,