Updated the following dependencies, as they require code changes to work properly:

- Scala
- fs2
- http4s
This commit is contained in:
Rehan Mahmood
2023-10-31 14:24:00 -04:00
parent c9ebd15b23
commit 2a39b2f6a6
64 changed files with 224 additions and 150 deletions

View File

@@ -9,6 +9,7 @@ package docspell.analysis
import cats.Applicative
import cats.effect._
import cats.implicits._
import fs2.io.file.Files
import docspell.analysis.classifier.{StanfordTextClassifier, TextClassifier}
import docspell.analysis.contact.Contact
@@ -36,7 +37,7 @@ object TextAnalyser {
labels ++ dates.map(dl => dl.label.copy(label = dl.date.toString))
}
def create[F[_]: Async](cfg: TextAnalysisConfig): Resource[F, TextAnalyser[F]] =
def create[F[_]: Async: Files](cfg: TextAnalysisConfig): Resource[F, TextAnalyser[F]] =
Resource
.eval(Nlp(cfg.nlpConfig))
.map(stanfordNer =>
@@ -83,7 +84,7 @@ object TextAnalyser {
/** Provides the nlp pipeline based on the configuration. */
private object Nlp {
def apply[F[_]: Async](
def apply[F[_]: Async: Files](
cfg: TextAnalysisConfig.NlpConfig
): F[Input[F] => F[Vector[NerLabel]]] = {
val log = docspell.logging.getLogger[F]

View File

@@ -21,7 +21,7 @@ import docspell.logging.Logger
import edu.stanford.nlp.classify.ColumnDataClassifier
final class StanfordTextClassifier[F[_]: Async](cfg: TextClassifierConfig)
final class StanfordTextClassifier[F[_]: Async: Files](cfg: TextClassifierConfig)
extends TextClassifier[F] {
def trainClassifier[A](

View File

@@ -11,6 +11,7 @@ import scala.concurrent.duration.{Duration => _, _}
import cats.effect.Ref
import cats.effect._
import cats.implicits._
import fs2.io.file.Files
import docspell.analysis.NlpSettings
import docspell.common._
@@ -32,7 +33,7 @@ trait PipelineCache[F[_]] {
object PipelineCache {
private[this] val logger = docspell.logging.unsafeLogger
def apply[F[_]: Async](clearInterval: Duration)(
def apply[F[_]: Async: Files](clearInterval: Duration)(
creator: NlpSettings => Annotator[F],
release: F[Unit]
): F[PipelineCache[F]] = {
@@ -44,7 +45,7 @@ object PipelineCache {
} yield new Impl[F](data, creator, cacheClear)
}
final private class Impl[F[_]: Async](
final private class Impl[F[_]: Async: Files](
data: Ref[F, Map[String, Entry[Annotator[F]]]],
creator: NlpSettings => Annotator[F],
cacheClear: CacheClearing[F]