Update scalafmt settings

eikek 2021-09-22 17:23:24 +02:00
parent c37f1d7c31
commit 9013f2de5b
277 changed files with 1579 additions and 1615 deletions


@@ -1,6 +1,7 @@
version = "3.0.4"
preset = defaultWithAlign
preset = default
align.preset = some
maxColumn = 90
@@ -12,3 +13,10 @@ rewrite.rules = [
PreferCurlyFors
SortModifiers
]
assumeStandardLibraryStripMargin = true
align.stripMargin = true
docstrings.style = SpaceAsterisk
docstrings.oneline = fold
docstrings.wrap = "yes"
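In effect, the config moves from the defaultWithAlign preset to default with only selective alignment (align.preset = some), and folds one-sentence Scaladoc comments onto a single line (docstrings.oneline = fold, SpaceAsterisk style, wrapping enabled). A minimal before/after sketch in Scala, assuming scalafmt 3.0.4 with just the settings above; the pre-change column padding is illustrative, the fragments are taken from the diffs below:

// Before (defaultWithAlign): short Scaladoc on two lines, '=' padded to align
/** Some selected TLDs.
  */
val taskGroup            = user
val migrationTaskTracker = Ident.unsafe("full-text-index-tracker")

// After (default + align.preset = some): docstring folds, padding dropped
/** Some selected TLDs. */
val taskGroup = user
val migrationTaskTracker = Ident.unsafe("full-text-index-tracker")

This whitespace-only reflow accounts for most of the 277 changed files below.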


@@ -4,7 +4,7 @@ import com.typesafe.sbt.SbtGit.GitKeys._
import docspell.build._
import de.heikoseeberger.sbtheader.CommentBlockCreator
val toolsPackage = taskKey[Seq[File]]("Package the scripts/extension tools")
val elmCompileMode = settingKey[ElmCompileMode]("How to compile elm sources")
// --- Settings
@@ -13,20 +13,20 @@ def inTest(d0: Seq[ModuleID], ds: Seq[ModuleID]*) =
ds.fold(d0)(_ ++ _).map(_ % Test)
val scalafixSettings = Seq(
semanticdbEnabled := true, // enable SemanticDB
semanticdbVersion := scalafixSemanticdb.revision, //"4.4.0"
ThisBuild / scalafixDependencies ++= Dependencies.organizeImports
)
val sharedSettings = Seq(
organization := "com.github.eikek",
scalaVersion := "2.13.6",
organizationName := "Eike K. & Contributors",
licenses += ("AGPL-3.0-or-later", url(
"https://spdx.org/licenses/AGPL-3.0-or-later.html"
)),
startYear := Some(2020),
headerLicenseStyle := HeaderLicenseStyle.SpdxSyntax,
headerSources / excludeFilter := HiddenFileFilter || "*.java" || "StringUtil.scala",
scalacOptions ++= Seq(
"-deprecation",
@@ -45,9 +45,9 @@ val sharedSettings = Seq(
),
javacOptions ++= Seq("-target", "1.8", "-source", "1.8"),
LocalRootProject / toolsPackage := {
val v = version.value
val logger = streams.value.log
val dir = (LocalRootProject / baseDirectory).value / "tools"
packageTools(logger, dir, v)
},
Compile / console / scalacOptions :=
@@ -55,7 +55,7 @@ val sharedSettings = Seq(
Test / console / scalacOptions :=
(scalacOptions.value.filter(o => !o.contains("-Xlint") && !o.contains("-W"))),
libraryDependencySchemes ++= Seq(
"com.github.eikek" %% "calev-core" % VersionScheme.Always,
"com.github.eikek" %% "calev-core" % VersionScheme.Always,
"com.github.eikek" %% "calev-circe" % VersionScheme.Always
)
) ++ scalafixSettings
@@ -66,8 +66,8 @@ val testSettingsMUnit = Seq(
)
lazy val noPublish = Seq(
publish := {},
publishLocal := {},
publishArtifact := false
)
@@ -113,7 +113,7 @@ def webjarSettings(queryJS: Project) = Seq(
}.taskValue,
Compile / resourceGenerators += Def.task {
val logger = streams.value.log
val out = (queryJS / Compile / fullOptJS).value
logger.info(s"Produced query js file: ${out.data}")
copyWebjarResources(
Seq(out.data),
@@ -297,7 +297,7 @@ val files = project
Dependencies.tika ++
Dependencies.icu4j,
Test / sourceGenerators += Def.task {
val base = (Test / resourceDirectory).value
val files = (base ** (_.isFile)).pair(sbt.io.Path.relativeTo(base))
val lines = files.toList.map(_._2).map { s =>
val ident = s.replaceAll("[^a-zA-Z0-9_]+", "_")
@@ -466,9 +466,9 @@ val restapi = project
libraryDependencies ++=
Dependencies.circe,
openapiTargetLanguage := Language.Scala,
openapiPackage := Pkg("docspell.restapi.model"),
openapiSpec := (Compile / resourceDirectory).value / "docspell-openapi.yml",
openapiStaticGen := OpenApiDocGenerator.Redoc
)
.dependsOn(common)
@@ -486,9 +486,9 @@ val joexapi = project
Dependencies.http4sCirce ++
Dependencies.http4sClient,
openapiTargetLanguage := Language.Scala,
openapiPackage := Pkg("docspell.joexapi.model"),
openapiSpec := (Compile / resourceDirectory).value / "joex-openapi.yml",
openapiStaticGen := OpenApiDocGenerator.Redoc
)
.dependsOn(common)
@@ -535,9 +535,9 @@ val webapp = project
.settings(stylesSettings)
.settings(webjarSettings(query.js))
.settings(
name := "docspell-webapp",
openapiTargetLanguage := Language.Elm,
openapiPackage := Pkg("Api.Model"),
openapiSpec := (restapi / Compile / resourceDirectory).value / "docspell-openapi.yml",
openapiElmConfig := ElmConfig().withJson(ElmJson.decodePipeline)
)
@@ -561,7 +561,7 @@ val joex = project
.settings(
name := "docspell-joex",
description := "The joex component (job executor) for docspell which executes long-running tasks.",
packageSummary := "Docspell Joex",
packageDescription := description.value,
libraryDependencies ++=
Dependencies.fs2 ++
@@ -604,9 +604,9 @@ val restserver = project
.settings(debianSettings("docspell-server"))
.settings(buildInfoSettings)
.settings(
name := "docspell-restserver",
description := "Docspell server providing the user interface and a REST Api.",
packageSummary := "Docspell Rest server",
packageDescription := description.value,
libraryDependencies ++=
Dependencies.http4sServer ++
@@ -661,15 +661,15 @@ val website = project
.enablePlugins(ZolaPlugin, GitHubPagesPlugin)
.settings(sharedSettings)
.settings(
name := "docspell-website",
publishArtifact := false,
publish / skip := true,
gitHubPagesOrgName := "eikek",
gitHubPagesRepoName := "docspell",
gitHubPagesSiteDir := zolaOutputDir.value,
Compile / resourceGenerators += Def.task {
val templateOut = baseDirectory.value / "site" / "templates" / "shortcodes"
val staticOut = baseDirectory.value / "site" / "static" / "openapi"
IO.createDirectories(Seq(templateOut, staticOut))
val logger = streams.value.log
@@ -692,14 +692,14 @@ val website = project
IO.write(
target,
"""|+++
|title = "Changelog"
|description = "See what changed between releases."
|weight = 10
|insert_anchor_links = "right"
|[extra]
|maketoc = false
|+++
|""".stripMargin
)
IO.append(target, IO.readBytes(changelog))
Seq(target)
@@ -798,7 +798,7 @@ def compileElm(
}
def createWebjarSource(wj: Seq[ModuleID], out: File): Seq[File] = {
val target = out / "Webjars.scala"
val badChars = "-.".toSet
val fields = wj
.map(m =>
@@ -808,10 +808,10 @@ def createWebjarSource(wj: Seq[ModuleID], out: File): Seq[File] = {
)
.mkString("\n\n")
val content = s"""package docspell.restserver.webapp
|object Webjars {
|$fields
|}
|""".stripMargin
IO.write(target, content)
Seq(target)
@@ -824,15 +824,15 @@ def packageTools(logger: Logger, dir: File, version: String): Seq[File] = {
val archive = target / s"docspell-tools-$version.zip"
logger.info(s"Packaging tools to $archive ...")
val webext = target / "docspell-firefox-extension.xpi"
val wx = dir / "webextension"
IO.zip(
Seq(
wx / "_locales/de/messages.json" -> "_locales/de/messages.json",
wx / "_locales/en/messages.json" -> "_locales/en/messages.json",
wx / "docspell.js" -> "docspell.js",
wx / "icons" / "logo-48.png" -> "icons/logo-48.png",
wx / "icons" / "logo-96.png" -> "icons/logo-96.png",
wx / "manifest.json" -> "manifest.json"
wx / "docspell.js" -> "docspell.js",
wx / "icons" / "logo-48.png" -> "icons/logo-48.png",
wx / "icons" / "logo-96.png" -> "icons/logo-96.png",
wx / "manifest.json" -> "manifest.json"
),
webext,
None


@@ -54,7 +54,7 @@ object TextAnalyser {
tags0 <- stanfordNer(Nlp.Input(cacheKey, settings, logger, input))
tags1 <- contactNer(input)
dates <- dateNer(settings.lang, input)
list = tags0 ++ tags1
spans = NerLabelSpan.build(list)
} yield Result(spans ++ list, dates)


@@ -31,10 +31,10 @@ final class StanfordTextClassifier[F[_]: Async](cfg: TextClassifierConfig)
.withTempDir(cfg.workingDir, "trainclassifier")
.use { dir =>
for {
rawData <- writeDataFile(dir, data)
_ <- logger.debug(s"Learning from ${rawData.count} items.")
trainData <- splitData(logger, rawData)
scores <- cfg.classifierConfigs.traverse(m => train(logger, trainData, m))
sorted = scores.sortBy(-_.score)
res <- handler(sorted.head.model)
} yield res
@@ -77,7 +77,7 @@ final class StanfordTextClassifier[F[_]: Async](cfg: TextClassifierConfig)
} yield res
def splitData(logger: Logger[F], in: RawData): F[TrainData] = {
val f = if (cfg.classifierConfigs.size > 1) 0.15 else 0.0
val nTest = (in.count * f).toLong
val td =
@@ -142,8 +142,8 @@ final class StanfordTextClassifier[F[_]: Async](cfg: TextClassifierConfig)
props: Map[String, String]
): Map[String, String] =
prepend("2.", props) ++ Map(
"trainFile" -> trainData.train.absolutePathAsString,
"testFile" -> trainData.test.absolutePathAsString,
"trainFile" -> trainData.train.absolutePathAsString,
"testFile" -> trainData.test.absolutePathAsString,
"serializeTo" -> trainData.modelFile.absolutePathAsString
).toList


@@ -33,7 +33,7 @@ object Contact {
if (atIdx <= 0 || str.indexOf('@', atIdx + 1) > 0) false
else {
val name = str.substring(0, atIdx)
val dom = str.substring(atIdx + 1)
Domain.isDomain(dom) && name.forall(c => !c.isWhitespace)
}
}


@@ -14,8 +14,7 @@ private[analysis] object Tld {
def endsWithTld(str: String): Boolean =
findTld(str).isDefined
/** Some selected TLDs.
*/
/** Some selected TLDs. */
private[this] val known = List(
".com",
".org",


@@ -177,17 +177,17 @@ object DateFind {
object Result {
final case class Success[A](value: A, rest: List[Word]) extends Result[A] {
val toOption = Some(value)
def flatMap[B](f: A => Result[B]): Result[B] = f(value)
def map[B](f: A => B): Result[B] = Success(f(value), rest)
def next[B](r: Reader[B]): Result[(A, B)] =
r.read(rest).map(b => (value, b))
}
final case object Failure extends Result[Nothing] {
val toOption = None
def flatMap[B](f: Nothing => Result[B]): Result[B] = this
def map[B](f: Nothing => B): Result[B] = this
def next[B](r: Reader[B]): Result[(Nothing, B)] = this
}
implicit def resultSemigroup[A: Semigroup]: Semigroup[Result[A]] =


@@ -74,9 +74,9 @@ object BasicCRFAnnotator {
}
final class Cache {
private[this] lazy val germanNerClassifier = makeAnnotator(Language.German)
private[this] lazy val englishNerClassifier = makeAnnotator(Language.English)
private[this] lazy val frenchNerClassifier = makeAnnotator(Language.French)
def forLang(language: NLPLanguage): Annotator =
language match {


@@ -38,9 +38,9 @@ object PipelineCache {
release: F[Unit]
): F[PipelineCache[F]] =
for {
data <- Ref.of(Map.empty[String, Entry[Annotator[F]]])
cacheClear <- CacheClearing.create(data, clearInterval, release)
_ <- Logger.log4s(logger).info("Creating nlp pipeline cache")
} yield new Impl[F](data, creator, cacheClear)
final private class Impl[F[_]: Async](
@@ -51,7 +51,7 @@ object PipelineCache {
def obtain(key: String, settings: NlpSettings): Resource[F, Annotator[F]] =
for {
_ <- cacheClear.withCache
id <- Resource.eval(makeSettingsId(settings))
nlp <- Resource.eval(
data.modify(cache => getOrCreate(key, id, cache, settings, creator))
@@ -73,13 +73,13 @@ object PipelineCache {
s"StanfordNLP settings changed for key $key. Creating new classifier"
)
val nlp = creator(settings)
val e = Entry(id, nlp)
(cache.updated(key, e), nlp)
}
case None =>
val nlp = creator(settings)
val e = Entry(id, nlp)
(cache.updated(key, e), nlp)
}
@@ -114,7 +114,7 @@ object PipelineCache {
release: F[Unit]
): F[CacheClearing[F]] =
for {
counter <- Ref.of(0L)
cleaning <- Ref.of(None: Option[Fiber[F, Throwable, Unit]])
log = Logger.log4s(logger)
result <-


@@ -44,47 +44,47 @@ object Properties {
def nerGerman(regexNerMappingFile: Option[String], highRecall: Boolean): JProps =
Properties(
"annotators" -> "tokenize,ssplit,mwt,pos,lemma,ner",
"tokenize.language" -> "de",
"mwt.mappingFile" -> "edu/stanford/nlp/models/mwt/german/german-mwt.tsv",
"pos.model" -> "edu/stanford/nlp/models/pos-tagger/german-ud.tagger",
"ner.statisticalOnly" -> "true",
"ner.rulesOnly" -> "false",
"annotators" -> "tokenize,ssplit,mwt,pos,lemma,ner",
"tokenize.language" -> "de",
"mwt.mappingFile" -> "edu/stanford/nlp/models/mwt/german/german-mwt.tsv",
"pos.model" -> "edu/stanford/nlp/models/pos-tagger/german-ud.tagger",
"ner.statisticalOnly" -> "true",
"ner.rulesOnly" -> "false",
"ner.applyFineGrained" -> "false",
"ner.applyNumericClassifiers" -> "false", //only english supported, not needed currently
"ner.useSUTime" -> "false", //only english, unused in docspell
"ner.language" -> "de",
"ner.language" -> "de",
"ner.model" -> "edu/stanford/nlp/models/ner/german.distsim.crf.ser.gz,edu/stanford/nlp/models/ner/english.conll.4class.distsim.crf.ser.gz"
).withRegexNer(regexNerMappingFile).withHighRecall(highRecall)
def nerEnglish(regexNerMappingFile: Option[String]): JProps =
Properties(
"annotators" -> "tokenize,ssplit,pos,lemma,ner",
"annotators" -> "tokenize,ssplit,pos,lemma,ner",
"tokenize.language" -> "en",
"pos.model" -> "edu/stanford/nlp/models/pos-tagger/english-left3words-distsim.tagger",
"ner.statisticalOnly" -> "true",
"ner.rulesOnly" -> "false",
"ner.applyFineGrained" -> "false",
"ner.statisticalOnly" -> "true",
"ner.rulesOnly" -> "false",
"ner.applyFineGrained" -> "false",
"ner.applyNumericClassifiers" -> "false",
"ner.useSUTime" -> "false",
"ner.language" -> "en",
"ner.useSUTime" -> "false",
"ner.language" -> "en",
"ner.model" -> "edu/stanford/nlp/models/ner/english.conll.4class.distsim.crf.ser.gz"
).withRegexNer(regexNerMappingFile)
def nerFrench(regexNerMappingFile: Option[String], highRecall: Boolean): JProps =
Properties(
"annotators" -> "tokenize,ssplit,mwt,pos,lemma,ner",
"annotators" -> "tokenize,ssplit,mwt,pos,lemma,ner",
"tokenize.language" -> "fr",
"mwt.mappingFile" -> "edu/stanford/nlp/models/mwt/french/french-mwt.tsv",
"mwt.pos.model" -> "edu/stanford/nlp/models/mwt/french/french-mwt.tagger",
"mwt.mappingFile" -> "edu/stanford/nlp/models/mwt/french/french-mwt.tsv",
"mwt.pos.model" -> "edu/stanford/nlp/models/mwt/french/french-mwt.tagger",
"mwt.statisticalMappingFile" -> "edu/stanford/nlp/models/mwt/french/french-mwt-statistical.tsv",
"pos.model" -> "edu/stanford/nlp/models/pos-tagger/french-ud.tagger",
"ner.statisticalOnly" -> "true",
"ner.rulesOnly" -> "false",
"pos.model" -> "edu/stanford/nlp/models/pos-tagger/french-ud.tagger",
"ner.statisticalOnly" -> "true",
"ner.rulesOnly" -> "false",
"ner.applyFineGrained" -> "false",
"ner.applyNumericClassifiers" -> "false",
"ner.useSUTime" -> "false",
"ner.language" -> "de",
"ner.useSUTime" -> "false",
"ner.language" -> "de",
"ner.model" -> "edu/stanford/nlp/models/ner/french-wikiner-4class.crf.ser.gz,edu/stanford/nlp/models/ner/english.conll.4class.distsim.crf.ser.gz"
).withRegexNer(regexNerMappingFile).withHighRecall(highRecall)


@@ -8,15 +8,14 @@ package docspell.analysis.split
import fs2.Stream
/** Splits text into words.
*/
/** Splits text into words. */
object TextSplitter {
private[this] val trimChars =
".,…_[]^!<>=&ſ/{}*?()-:#$|~`+%\\\"'; \t\r\n".toSet
def split[F[_]](str: String, sep: Set[Char], start: Int = 0): Stream[F, Word] = {
val indexes = sep.map(c => str.indexOf(c.toInt)).filter(_ >= 0)
val index = if (indexes.isEmpty) -1 else indexes.min
if (index < 0) Stream.emit(Word(str, start, start + str.length))
else if (index == 0) split(str.substring(1), sep, start + 1)


@@ -7,9 +7,9 @@
package docspell.analysis.split
case class Word(value: String, begin: Int, end: Int) {
def isEmpty: Boolean = value.isEmpty
def nonEmpty: Boolean = !isEmpty
def length: Int = value.length
def trimLeft(chars: Set[Char]): Word = {
val v = value.dropWhile(chars.contains)


@@ -91,19 +91,19 @@ class StanfordNerAnnotatorSuite extends FunSuite {
val regexNerContent =
s"""(?i)volantino ag${"\t"}ORGANIZATION${"\t"}LOCATION,PERSON,MISC${"\t"}3
|(?i)volantino${"\t"}ORGANIZATION${"\t"}LOCATION,PERSON,MISC${"\t"}3
|(?i)ag${"\t"}ORGANIZATION${"\t"}LOCATION,PERSON,MISC${"\t"}3
|(?i)andrea rossi${"\t"}PERSON${"\t"}LOCATION,MISC${"\t"}2
|(?i)andrea${"\t"}PERSON${"\t"}LOCATION,MISC${"\t"}2
|(?i)rossi${"\t"}PERSON${"\t"}LOCATION,MISC${"\t"}2
|""".stripMargin
File
.withTempDir[IO](File.path(Paths.get("target")), "test-regex-ner")
.use { dir =>
for {
out <- File.writeString[IO](dir / "regex.txt", regexNerContent)
ann = StanfordNerAnnotator.makePipeline(StanfordNerSettings.RegexOnly(out))
labels = StanfordNerAnnotator.nerAnnotate(ann, "Hello Andrea Rossi, can you.")
_ <- IO(
assertEquals(


@@ -59,54 +59,54 @@ object BackendApp {
ftsClient: FtsClient[F]
): Resource[F, BackendApp[F]] =
for {
utStore <- UserTaskStore(store)
queue <- JobQueue(store)
totpImpl <- OTotp(store, Totp.default)
loginImpl <- Login[F](store, Totp.default)
signupImpl <- OSignup[F](store)
joexImpl <- OJoex(JoexClient(httpClient), store)
collImpl <- OCollective[F](store, utStore, queue, joexImpl)
sourceImpl <- OSource[F](store)
tagImpl <- OTag[F](store)
equipImpl <- OEquipment[F](store)
orgImpl <- OOrganization(store)
uploadImpl <- OUpload(store, queue, joexImpl)
nodeImpl <- ONode(store)
jobImpl <- OJob(store, joexImpl)
createIndex <- CreateIndex.resource(ftsClient, store)
itemImpl <- OItem(store, ftsClient, createIndex, queue, joexImpl)
itemSearchImpl <- OItemSearch(store)
fulltextImpl <- OFulltext(itemSearchImpl, ftsClient, store, queue, joexImpl)
javaEmil =
JavaMailEmil(Settings.defaultSettings.copy(debug = cfg.mailDebug))
mailImpl <- OMail(store, javaEmil)
userTaskImpl <- OUserTask(utStore, queue, joexImpl)
folderImpl <- OFolder(store)
customFieldsImpl <- OCustomFields(store)
simpleSearchImpl = OSimpleSearch(fulltextImpl, itemSearchImpl)
clientSettingsImpl <- OClientSettings(store)
} yield new BackendApp[F] {
val login = loginImpl
val signup = signupImpl
val collective = collImpl
val source = sourceImpl
val tag = tagImpl
val equipment = equipImpl
val organization = orgImpl
val upload = uploadImpl
val node = nodeImpl
val job = jobImpl
val item = itemImpl
val itemSearch = itemSearchImpl
val fulltext = fulltextImpl
val mail = mailImpl
val joex = joexImpl
val userTask = userTaskImpl
val folder = folderImpl
val customFields = customFieldsImpl
val simpleSearch = simpleSearchImpl
val clientSettings = clientSettingsImpl
val totp = totpImpl
}
def apply[F[_]: Async](
@@ -115,9 +115,9 @@ object BackendApp {
httpClientEc: ExecutionContext
)(ftsFactory: Client[F] => Resource[F, FtsClient[F]]): Resource[F, BackendApp[F]] =
for {
store <- Store.create(cfg.jdbc, cfg.files.chunkSize, connectEC)
httpClient <- BlazeClientBuilder[F](httpClientEc).resource
ftsClient <- ftsFactory(httpClient)
backend <- create(cfg, store, httpClient, ftsClient)
} yield backend
}


@@ -19,7 +19,7 @@ object JobFactory {
account: Option[AccountId]
): F[RJob] =
for {
id <- Ident.randomId[F]
now <- Timestamp.current[F]
job = RJob.newJob(
id,
@@ -39,7 +39,7 @@ object JobFactory {
account: Option[AccountId]
): F[RJob] =
for {
id <- Ident.randomId[F]
now <- Timestamp.current[F]
job = RJob.newJob(
id,
@@ -59,7 +59,7 @@ object JobFactory {
submitter: Option[Ident]
): F[RJob] =
for {
id <- Ident.randomId[F]
now <- Timestamp.current[F]
} yield RJob.newJob(
id,
@@ -79,7 +79,7 @@ object JobFactory {
prio: Priority
): F[RJob] =
for {
id <- Ident.randomId[F]
now <- Timestamp.current[F]
job = RJob.newJob(
id,
@@ -102,7 +102,7 @@ object JobFactory {
prio: Priority
): F[RJob] =
for {
id <- Ident.randomId[F]
now <- Timestamp.current[F]
job = RJob.newJob(
id,
@@ -124,7 +124,7 @@ object JobFactory {
tracker: Option[Ident]
): F[RJob] =
for {
id <- Ident.randomId[F]
now <- Timestamp.current[F]
job = RJob.newJob(
id,
@@ -163,14 +163,14 @@ object JobFactory {
)
for {
now <- Timestamp.current[F]
jobs <- args.traverse(a => create(now, a))
} yield jobs
}
def reIndexAll[F[_]: Sync]: F[RJob] =
for {
id <- Ident.randomId[F]
now <- Timestamp.current[F]
} yield RJob.newJob(
id,
@@ -186,7 +186,7 @@ object JobFactory {
def reIndex[F[_]: Sync](account: AccountId): F[RJob] =
for {
id <- Ident.randomId[F]
now <- Timestamp.current[F]
args = ReIndexTaskArgs(Some(account.collective))
} yield RJob.newJob(


@@ -53,8 +53,8 @@ object AuthToken {
case Array(ms, as, fa, salt, sig) =>
for {
millis <- TokenUtil.asInt(ms).toRight("Cannot read authenticator data")
acc <- TokenUtil.b64dec(as).toRight("Cannot read authenticator data")
accId <- AccountId.parse(acc)
twofac <- Right[String, Boolean](java.lang.Boolean.parseBoolean(fa))
} yield AuthToken(millis, accId, twofac, salt, sig)
@@ -70,15 +70,15 @@ object AuthToken {
for {
salt <- Common.genSaltString[F]
millis = Instant.now.toEpochMilli
cd = AuthToken(millis, accountId, requireSecondFactor, salt, "")
sig = TokenUtil.sign(cd, key)
} yield cd.copy(sig = sig)
def update[F[_]: Sync](token: AuthToken, key: ByteVector): F[AuthToken] =
for {
now <- Timestamp.current[F]
salt <- Common.genSaltString[F]
data = AuthToken(now.toMillis, token.account, token.requireSecondFactor, salt, "")
sig = TokenUtil.sign(data, key)
} yield data.copy(sig = sig)
}


@@ -85,8 +85,8 @@ object Login {
def ok(session: AuthToken, remember: Option[RememberToken]): Result =
Ok(session, remember)
def invalidAuth: Result = InvalidAuth
def invalidTime: Result = InvalidTime
def invalidFactor: Result = InvalidFactor
}
@@ -98,7 +98,7 @@ object Login {
def loginExternal(config: Config)(accountId: AccountId): F[Result] =
for {
data <- store.transact(QLogin.findUser(accountId))
_ <- logF.trace(s"Account lookup: $data")
res <-
if (data.exists(checkNoPassword(_, Set(AccountSource.OpenId))))
doLogin(config, accountId, false)
@@ -124,7 +124,7 @@ object Login {
case Right(acc) =>
for {
data <- store.transact(QLogin.findUser(acc))
_ <- Sync[F].delay(logger.trace(s"Account lookup: $data"))
res <-
if (data.exists(check(up.pass))) doLogin(config, acc, up.rememberMe)
else Result.invalidAuth.pure[F]
@@ -137,7 +137,7 @@ object Login {
def loginSecondFactor(config: Config)(sf: SecondFactor): F[Result] = {
val okResult: F[Result] =
for {
_ <- store.transact(RUser.updateLogin(sf.token.account))
newToken <- AuthToken.user(sf.token.account, false, config.serverSecret)
rem <- OptionT
.whenF(sf.rememberMe && config.rememberMe.enabled)(
@@ -180,7 +180,7 @@ object Login {
def loginRememberMe(config: Config)(token: String): F[Result] = {
def okResult(acc: AccountId) =
for {
_ <- store.transact(RUser.updateLogin(acc))
token <- AuthToken.user(acc, false, config.serverSecret)
} yield Result.ok(token, None)
@@ -270,8 +270,8 @@ object Login {
config: Config
): F[RememberToken] =
for {
rme <- RRememberMe.generate[F](acc)
_ <- store.transact(RRememberMe.insert(rme))
token <- RememberToken.user(rme.id, config.serverSecret)
} yield token


@@ -45,8 +45,8 @@ object RememberToken {
case Array(ms, as, salt, sig) =>
for {
millis <- TokenUtil.asInt(ms).toRight("Cannot read authenticator data")
rId <- TokenUtil.b64dec(as).toRight("Cannot read authenticator data")
accId <- Ident.fromString(rId)
} yield RememberToken(millis, accId, salt, sig)
case _ =>
@@ -57,8 +57,8 @@ object RememberToken {
for {
salt <- Common.genSaltString[F]
millis = Instant.now.toEpochMilli
cd = RememberToken(millis, rememberId, salt, "")
sig = TokenUtil.sign(cd, key)
} yield cd.copy(sig = sig)
}


@@ -43,7 +43,7 @@ object Merge {
def merge(givenIds: NonEmptyList[Ident], collective: Ident): F[Result[RItem]] =
(for {
items <- loadItems(givenIds, collective)
ids = items.map(_.id)
target = moveMainData(items)
_ <- EitherT.right[Error](store.transact(RItem.updateAll(target)))
_ <- EitherT.right[Error](moveTags(ids))
@@ -101,7 +101,7 @@ object Merge {
def moveCustomFields(items: NonEmptyList[Ident]): F[Unit] =
for {
values <- store.transact(QCustomField.findAllValues(items))
byField = values.groupBy(_.field.name)
newValues = mergeFields(items.head, byField)
_ <- newValues.traverse(fv =>
store.transact(RCustomField.setValue(fv.field, items.head, fv.value))


@@ -77,7 +77,7 @@ object OClientSettings {
)
)
userId <- getUserId(account)
data <- OptionT(store.transact(RClientSettings.find(clientId, userId)))
} yield data).value
})


@@ -101,27 +101,27 @@ object OCollective {
sealed trait PassResetResult
object PassResetResult {
case class Success(newPw: Password) extends PassResetResult
case object NotFound extends PassResetResult
case object UserNotLocal extends PassResetResult
def success(np: Password): PassResetResult = Success(np)
def notFound: PassResetResult = NotFound
def userNotLocal: PassResetResult = UserNotLocal
}
sealed trait PassChangeResult
object PassChangeResult {
case object UserNotFound extends PassChangeResult
case object PasswordMismatch extends PassChangeResult
case object UpdateFailed extends PassChangeResult
case object UserNotLocal extends PassChangeResult
case object Success extends PassChangeResult
def userNotFound: PassChangeResult = UserNotFound
def passwordMismatch: PassChangeResult = PasswordMismatch
def success: PassChangeResult = Success
def updateFailed: PassChangeResult = UpdateFailed
def userNotLocal: PassChangeResult = UserNotLocal
}
def apply[F[_]: Async](
@@ -149,9 +149,9 @@ object OCollective {
private def updateLearnClassifierTask(coll: Ident, sett: Settings): F[Unit] =
for {
id <- Ident.randomId[F]
on = sett.classifier.map(_.enabled).getOrElse(false)
timer = sett.classifier.map(_.schedule).getOrElse(CalEvent.unsafe(""))
args = LearnClassifierArgs(coll)
ut = UserTask(
id,
LearnClassifierArgs.taskName,
@@ -168,7 +168,7 @@ object OCollective {
for {
id <- Ident.randomId[F]
settings = sett.emptyTrash.getOrElse(EmptyTrash.default)
args = EmptyTrashArgs(coll, settings.minAge)
ut = UserTask(id, EmptyTrashArgs.taskName, true, settings.schedule, None, args)
_ <- uts.updateOneTask(UserTaskScope(coll), args.makeSubject.some, ut)
_ <- joex.notifyAllNodes
@@ -187,8 +187,8 @@ object OCollective {
args
).encode.toPeriodicTask(UserTaskScope(collective), args.makeSubject.some)
job <- ut.toJob
_ <- queue.insert(job)
_ <- joex.notifyAllNodes
} yield ()
def startEmptyTrash(args: EmptyTrashArgs): F[Unit] =
@@ -203,8 +203,8 @@ object OCollective {
args
).encode.toPeriodicTask(UserTaskScope(args.collective), args.makeSubject.some)
job <- ut.toJob
_ <- queue.insert(job)
_ <- joex.notifyAllNodes
} yield ()
def findSettings(collective: Ident): F[Option[OCollective.Settings]] =


@@ -88,15 +88,15 @@ object OCustomFields {
sealed trait SetValueResult
object SetValueResult {
case object ItemNotFound extends SetValueResult
case object FieldNotFound extends SetValueResult
case class ValueInvalid(msg: String) extends SetValueResult
case object Success extends SetValueResult
def itemNotFound: SetValueResult = ItemNotFound
def fieldNotFound: SetValueResult = FieldNotFound
def valueInvalid(msg: String): SetValueResult = ValueInvalid(msg)
def success: SetValueResult = Success
}
case class RemoveValue(
@@ -109,12 +109,12 @@ object OCustomFields {
object CustomFieldOrder {
import docspell.store.qb.DSL._
final case object NameAsc extends CustomFieldOrder
final case object NameDesc extends CustomFieldOrder
final case object LabelAsc extends CustomFieldOrder
final case object LabelDesc extends CustomFieldOrder
final case object TypeAsc extends CustomFieldOrder
final case object TypeDesc extends CustomFieldOrder
def parse(str: String): Either[String, CustomFieldOrder] =
str.toLowerCase match {
@@ -172,7 +172,7 @@ object OCustomFields {
def create(field: NewCustomField): F[AddResult] = {
val exists = RCustomField.exists(field.name, field.cid)
val insert = for {
id <- Ident.randomId[ConnectionIO]
now <- Timestamp.current[ConnectionIO]
rec = RCustomField(id, field.name, field.label, field.cid, field.ftype, now)
n <- RCustomField.insert(rec)
@@ -188,9 +188,9 @@ object OCustomFields {
val update =
for {
field <- OptionT(RCustomField.findByIdOrName(fieldIdOrName, coll))
_ <- OptionT.liftF(logger.info(s"Deleting field: $field"))
n <- OptionT.liftF(RCustomFieldValue.deleteByField(field.id))
k <- OptionT.liftF(RCustomField.deleteById(field.id, coll))
} yield n + k
UpdateResult.fromUpdate(store.transact(update.getOrElse(0)))
@@ -230,8 +230,8 @@ object OCustomFields {
val update =
for {
field <- OptionT(RCustomField.findByIdOrName(in.field, in.collective))
_ <- OptionT.liftF(logger.debug(s"Field found by '${in.field}': $field"))
n <- OptionT.liftF(RCustomFieldValue.deleteValue(field.id, in.item))
} yield n
UpdateResult.fromUpdate(store.transact(update.getOrElse(0)))


@@ -36,7 +36,7 @@ object OEquipment {
sealed trait EquipmentOrder
object EquipmentOrder {
final case object NameAsc extends EquipmentOrder
final case object NameDesc extends EquipmentOrder
def parse(str: String): Either[String, EquipmentOrder] =


@@ -65,9 +65,9 @@ object OFolder {
sealed trait FolderOrder
object FolderOrder {
final case object NameAsc extends FolderOrder
final case object NameDesc extends FolderOrder
final case object OwnerAsc extends FolderOrder
final case object OwnerDesc extends FolderOrder
def parse(str: String): Either[String, FolderOrder] =


@@ -49,8 +49,7 @@ trait OFulltext[F[_]] {
def findIndexOnlySummary(account: AccountId, fts: OFulltext.FtsInput): F[SearchSummary]
def findItemsSummary(q: Query, fts: OFulltext.FtsInput): F[SearchSummary]
/** Clears the full-text index completely and launches a task that indexes all data.
*/
/** Clears the full-text index completely and launches a task that indexes all data. */
def reindexAll: F[Unit]
/** Clears the full-text index for the given collective and starts a task indexing all
@@ -92,9 +91,9 @@ object OFulltext {
Resource.pure[F, OFulltext[F]](new OFulltext[F] {
def reindexAll: F[Unit] =
for {
_ <- logger.finfo(s"Re-index all.")
job <- JobFactory.reIndexAll[F]
_ <- queue.insertIfNew(job) *> joex.notifyAllNodes
} yield ()
def reindexCollective(account: AccountId): F[Unit] =
@@ -124,9 +123,9 @@ object OFulltext {
FtsQuery.HighlightSetting(ftsQ.highlightPre, ftsQ.highlightPost)
)
for {
_ <- logger.ftrace(s"Find index only: ${ftsQ.query}/$batch")
folders <- store.transact(QFolder.getMemberFolders(account))
ftsR <- fts.search(fq.withFolders(folders))
ftsItems = ftsR.results.groupBy(_.itemId)
select =
ftsItems.values
@@ -173,7 +172,7 @@ object OFulltext {
for {
folder <- store.transact(QFolder.getMemberFolders(account))
now <- Timestamp.current[F]
itemIds <- fts
.searchAll(fq.withFolders(folder))
.flatMap(r => Stream.emits(r.results.map(_.itemId)))
@@ -290,7 +289,7 @@ object OFulltext {
val qres =
for {
items <- sqlResult
ids = items.map(a => ItemId[A].itemId(a))
idsNel = NonEmptyList.fromFoldable(ids)
// must find all index results involving the items.
// Currently there is one result per item + one result per
@@ -301,7 +300,7 @@ object OFulltext {
ftsQ = fq.copy(items = ids.toSet, limit = limit)
ftsR <- fts.search(ftsQ)
ftsItems = ftsR.results.groupBy(_.itemId)
res = items.collect(convert(ftsR, ftsItems))
} yield (items.size, res)
Stream.eval(qres) ++ findItemsFts0(q, ftsQ, batch.next, search, convert)


@@ -188,23 +188,20 @@ trait OItem[F[_]] {
notifyJoex: Boolean
): F[UpdateResult]
/** Submits a task that (re)generates the preview image for an attachment.
*/
/** Submits a task that (re)generates the preview image for an attachment. */
def generatePreview(
args: MakePreviewArgs,
account: AccountId,
notifyJoex: Boolean
): F[UpdateResult]
/** Submits a task that (re)generates the preview images for all attachments.
*/
/** Submits a task that (re)generates the preview images for all attachments. */
def generateAllPreviews(
storeMode: MakePreviewArgs.StoreMode,
notifyJoex: Boolean
): F[UpdateResult]
/** Merges a list of items into one item. The remaining items are deleted.
*/
/** Merges a list of items into one item. The remaining items are deleted. */
def merge(
logger: Logger[F],
items: NonEmptyList[Ident],
@@ -222,8 +219,8 @@ object OItem {
joex: OJoex[F]
): Resource[F, OItem[F]] =
for {
otag <- OTag(store)
oorg <- OOrganization(store)
oequip <- OEquipment(store)
logger <- Resource.pure[F, Logger[F]](Logger.log4s(getLogger))
oitem <- Resource.pure[F, OItem[F]](new OItem[F] {
@@ -312,11 +309,11 @@ object OItem {
case kws =>
val db =
(for {
_ <- OptionT(RItem.checkByIdAndCollective(item, collective))
given <- OptionT.liftF(RTag.findAllByNameOrId(kws, collective))
exist <- OptionT.liftF(RTagItem.findAllIn(item, given.map(_.tagId)))
remove = given.map(_.tagId).toSet.intersect(exist.map(_.tagId).toSet)
toadd = given.map(_.tagId).diff(exist.map(_.tagId))
_ <- OptionT.liftF(RTagItem.setAllTags(item, toadd))
_ <- OptionT.liftF(RTagItem.removeAllTags(item, remove.toSeq))
} yield UpdateResult.success).getOrElse(UpdateResult.notFound)
@@ -337,9 +334,9 @@ object OItem {
collective: Ident
): F[UpdateResult] =
UpdateResult.fromUpdate(store.transact(for {
k <- RTagItem.deleteItemTags(items, collective)
rtags <- RTag.findAllByNameOrId(tags, collective)
res <- items.traverse(i => RTagItem.setAllTags(i, rtags.map(_.tagId)))
n = res.fold
} yield k + n))
@@ -733,8 +730,8 @@ object OItem {
): F[UpdateResult] =
for {
job <- JobFactory.convertAllPdfs[F](collective, submitter, Priority.Low)
_ <- queue.insertIfNew(job)
_ <- if (notifyJoex) joex.notifyAllNodes else ().pure[F]
} yield UpdateResult.success
def generatePreview(
@@ -744,8 +741,8 @@ object OItem {
): F[UpdateResult] =
for {
job <- JobFactory.makePreview[F](args, account.some)
_ <- queue.insertIfNew(job)
_ <- if (notifyJoex) joex.notifyAllNodes else ().pure[F]
} yield UpdateResult.success
def generateAllPreviews(
@@ -754,8 +751,8 @@ object OItem {
): F[UpdateResult] =
for {
job <- JobFactory.allPreviews[F](AllPreviewsArgs(None, storeMode), None)
_ <- queue.insertIfNew(job)
_ <- if (notifyJoex) joex.notifyAllNodes else ().pure[F]
} yield UpdateResult.success
private def onSuccessIgnoreError(update: F[Unit])(ar: UpdateResult): F[Unit] =


@@ -94,7 +94,7 @@ object OItemSearch {
}
case class AttachmentData[F[_]](ra: RAttachment, meta: RFileMeta, data: Stream[F, Byte])
extends BinaryData[F] {
val name = ra.name
val fileId = ra.fileId
}
@@ -103,7 +103,7 @@ object OItemSearch {
meta: RFileMeta,
data: Stream[F, Byte]
) extends BinaryData[F] {
val name = rs.name
val fileId = rs.fileId
}
@@ -112,7 +112,7 @@ object OItemSearch {
meta: RFileMeta,
data: Stream[F, Byte]
) extends BinaryData[F] {
val name = rs.name
val fileId = rs.fileId
}
@@ -121,7 +121,7 @@ object OItemSearch {
meta: RFileMeta,
data: Stream[F, Byte]
) extends BinaryData[F] {
val name = rs.name
val fileId = rs.fileId
}
@@ -290,7 +290,7 @@ object OItemSearch {
def findByFileSource(checksum: String, sourceId: Ident): F[Option[Vector[RItem]]] =
store.transact((for {
coll <- OptionT(RSource.findCollective(sourceId))
items <- OptionT.liftF(QItem.findByChecksum(checksum, coll, Set.empty))
} yield items).value)


@@ -31,13 +31,13 @@ object OJob {
sealed trait JobCancelResult
object JobCancelResult {
case object Removed extends JobCancelResult
case object CancelRequested extends JobCancelResult
case object JobNotFound extends JobCancelResult
def removed: JobCancelResult = Removed
def cancelRequested: JobCancelResult = CancelRequested
def jobNotFound: JobCancelResult = JobNotFound
}
case class JobDetail(job: RJob, logs: Vector[RJobLog])


@@ -32,12 +32,12 @@ object OJoex {
def notifyAllNodes: F[Unit] =
for {
nodes <- store.transact(RNode.findAll(NodeType.Joex))
_ <- nodes.toList.traverse(n => client.notifyJoexIgnoreErrors(n.url))
} yield ()
def cancelJob(job: Ident, worker: Ident): F[Boolean] =
(for {
node <- OptionT(store.transact(RNode.findById(worker)))
cancel <- OptionT.liftF(client.cancelJob(node.url, job))
} yield cancel.success).getOrElse(false)
})


@@ -162,7 +162,7 @@ object OMail {
def createSmtpSettings(accId: AccountId, s: SmtpSettings): F[AddResult] =
(for {
ru <- OptionT(store.transact(s.toRecord(accId).value))
ins = RUserEmail.insert(ru)
exists = RUserEmail.exists(ru.uid, ru.name)
res <- OptionT.liftF(store.add(ins, exists))
} yield res).getOrElse(AddResult.Failure(new Exception("User not found")))
@@ -175,7 +175,7 @@ object OMail {
val op = for {
um <- OptionT(RUserEmail.getByName(accId, name))
ru <- data.toRecord(accId)
n <- OptionT.liftF(RUserEmail.update(um.id, ru))
} yield n
store.transact(op.value).map(_.getOrElse(0))
@@ -193,7 +193,7 @@ object OMail {
def createImapSettings(accId: AccountId, data: ImapSettings): F[AddResult] =
(for {
ru <- OptionT(store.transact(data.toRecord(accId).value))
ins = RUserImap.insert(ru)
exists = RUserImap.exists(ru.uid, ru.name)
res <- OptionT.liftF(store.add(ins, exists))
} yield res).getOrElse(AddResult.Failure(new Exception("User not found")))
@@ -206,7 +206,7 @@ object OMail {
val op = for {
um <- OptionT(RUserImap.getByName(accId, name))
ru <- data.toRecord(accId)
n <- OptionT.liftF(RUserImap.update(um.id, ru))
} yield n
store.transact(op.value).map(_.getOrElse(0))
@@ -284,9 +284,9 @@ object OMail {
(for {
mailCfg <- getSmtpSettings
mail <- createMail(mailCfg)
mid <- OptionT.liftF(sendMail(mailCfg.toMailConfig, mail))
res <- mid.traverse(id => OptionT.liftF(storeMail(id, mailCfg)))
conv = res.fold(identity, _.fold(identity, id => SendResult.Success(id)))
} yield conv).getOrElse(SendResult.NotFound)
}


@@ -32,8 +32,8 @@ object ONode {
def register(appId: Ident, nodeType: NodeType, uri: LenientUri): F[Unit] =
for {
node <- RNode(appId, nodeType, uri)
_ <- logger.finfo(s"Registering node ${node.id.id}")
_ <- store.transact(RNode.set(node))
} yield ()
def unregister(appId: Ident): F[Unit] =


@@ -72,7 +72,7 @@ object OOrganization {
sealed trait OrganizationOrder
object OrganizationOrder {
final case object NameAsc extends OrganizationOrder
final case object NameDesc extends OrganizationOrder
def parse(str: String): Either[String, OrganizationOrder] =
@@ -94,10 +94,10 @@ object OOrganization {
sealed trait PersonOrder
object PersonOrder {
final case object NameAsc extends PersonOrder
final case object NameDesc extends PersonOrder
final case object OrgAsc extends PersonOrder
final case object OrgDesc extends PersonOrder
def parse(str: String): Either[String, PersonOrder] =
str.toLowerCase match {


@@ -48,8 +48,7 @@ trait OSimpleSearch[F[_]] {
): F[StringSearchResult[Items]] =
OSimpleSearch.applySearch[F, Items](fix, q)((iq, fts) => search(settings)(iq, fts))
/** Same as `searchByString` but returning a summary instead of the results.
*/
/** Same as `searchByString` but returning a summary instead of the results. */
final def searchSummaryByString(
settings: StatsSettings
)(fix: Query.Fix, q: ItemQueryString)(implicit


@@ -29,8 +29,7 @@ trait OTag[F[_]] {
def delete(id: Ident, collective: Ident): F[AddResult]
/** Load all tags given their ids. Ids that are not available are ignored.
*/
/** Load all tags given their ids. Ids that are not available are ignored. */
def loadAll(ids: List[Ident]): F[Vector[RTag]]
}
@@ -39,9 +38,9 @@ object OTag {
sealed trait TagOrder
object TagOrder {
final case object NameAsc extends TagOrder
final case object NameDesc extends TagOrder
final case object CategoryAsc extends TagOrder
final case object CategoryDesc extends TagOrder
def parse(str: String): Either[String, TagOrder] =
@@ -92,9 +91,9 @@ object OTag {
def delete(id: Ident, collective: Ident): F[AddResult] = {
val io = for {
optTag <- RTag.findByIdAndCollective(id, collective)
n0 <- optTag.traverse(t => RTagItem.deleteTag(t.tagId))
n1 <- optTag.traverse(t => RTagSource.deleteTag(t.tagId))
n2 <- optTag.traverse(t => RTag.delete(t.tagId, collective))
} yield (n0 |+| n1 |+| n2).getOrElse(0)
store.transact(io).attempt.map(AddResult.fromUpdate)
}


@@ -56,8 +56,8 @@ object OTotp {
s"otpauth://totp/$issuer:${accountId.asString}?secret=${key.data.toBase32}&issuer=$issuer"
)
}
case object AlreadyExists extends InitResult
case object NotFound extends InitResult
final case class Failed(ex: Throwable) extends InitResult
def success(accountId: AccountId, key: Key): InitResult =
@@ -71,7 +71,7 @@ object OTotp {
sealed trait ConfirmResult
object ConfirmResult {
case object Success extends ConfirmResult
case object Failed extends ConfirmResult
}
def apply[F[_]: Async](store: Store[F], totp: Totp): Resource[F, OTotp[F]] =
@@ -80,13 +80,13 @@ object OTotp {
def initialize(accountId: AccountId): F[InitResult] =
for {
_ <- log.info(s"Initializing TOTP for account ${accountId.asString}")
userId <- store.transact(RUser.findIdByAccount(accountId))
result <- userId match {
case Some(uid) =>
for {
record <- RTotp.generate[F](uid, totp.settings.mac)
un <- store.transact(RTotp.updateDisabled(record))
an <-
if (un != 0)
AddResult.entityExists("Entity exists, but update was ok").pure[F]
@@ -117,7 +117,7 @@ object OTotp {
def confirmInit(accountId: AccountId, otp: OnetimePassword): F[ConfirmResult] =
for {
_ <- log.info(s"Confirm TOTP setup for account ${accountId.asString}")
key <- store.transact(RTotp.findEnabledByLogin(accountId, false))
now <- Timestamp.current[F]
res <- key match {


@@ -102,8 +102,7 @@ object OUpload {
def noSource: UploadResult = NoSource
/** When adding files to an item, no item was found using the given item-id.
*/
/** When adding files to an item, no item was found using the given item-id. */
case object NoItem extends UploadResult
def noItem: UploadResult = NoItem
@@ -126,9 +125,9 @@ object OUpload {
itemId: Option[Ident]
): F[OUpload.UploadResult] =
(for {
_ <- checkExistingItem(itemId, account.collective)
files <- right(data.files.traverse(saveFile).map(_.flatten))
_ <- checkFileList(files)
lang <- data.meta.language match {
case Some(lang) => right(lang.pure[F])
case None =>
@@ -156,8 +155,8 @@ object OUpload {
if (data.multiple) files.map(f => ProcessItemArgs(meta, List(f)))
else Vector(ProcessItemArgs(meta, files.toList))
jobs <- right(makeJobs(args, account, data.priority, data.tracker))
_ <- right(logger.fdebug(s"Storing jobs: $jobs"))
res <- right(submitJobs(notifyJoex)(jobs))
_ <- right(
store.transact(
RSource.incrementCounter(data.meta.sourceAbbrev, account.collective)


@@ -19,8 +19,7 @@ import io.circe.Encoder
trait OUserTask[F[_]] {
/** Return the settings for all scan-mailbox tasks of the current user.
*/
/** Return the settings for all scan-mailbox tasks of the current user. */
def getScanMailbox(scope: UserTaskScope): Stream[F, UserTask[ScanMailboxArgs]]
/** Find a scan-mailbox task by the given id. */
@@ -29,16 +28,14 @@ trait OUserTask[F[_]] {
scope: UserTaskScope
): OptionT[F, UserTask[ScanMailboxArgs]]
/** Updates the scan-mailbox tasks and notifies the joex nodes.
*/
/** Updates the scan-mailbox tasks and notifies the joex nodes. */
def submitScanMailbox(
scope: UserTaskScope,
subject: Option[String],
task: UserTask[ScanMailboxArgs]
): F[Unit]
/** Return the settings for all the notify-due-items task of the current user.
*/
/** Return the settings for all the notify-due-items task of the current user. */
def getNotifyDueItems(scope: UserTaskScope): Stream[F, UserTask[NotifyDueItemsArgs]]
/** Find a notify-due-items task by the given id. */
@@ -47,8 +44,7 @@ trait OUserTask[F[_]] {
scope: UserTaskScope
): OptionT[F, UserTask[NotifyDueItemsArgs]]
/** Updates the notify-due-items tasks and notifies the joex nodes.
*/
/** Updates the notify-due-items tasks and notifies the joex nodes. */
def submitNotifyDueItems(
scope: UserTaskScope,
subject: Option[String],
@@ -80,9 +76,9 @@ object OUserTask {
): F[Unit] =
for {
ptask <- task.encode.toPeriodicTask(scope, subject)
job <- ptask.toJob
_ <- queue.insert(job)
_ <- joex.notifyAllNodes
} yield ()
def getScanMailbox(scope: UserTaskScope): Stream[F, UserTask[ScanMailboxArgs]] =


@@ -12,19 +12,15 @@ sealed trait SendResult
object SendResult {
/** Mail was successfully sent and stored to db.
*/
/** Mail was successfully sent and stored to db. */
case class Success(id: Ident) extends SendResult
/** There was a failure sending the mail. The mail is then not saved to db.
*/
/** There was a failure sending the mail. The mail is then not saved to db. */
case class SendFailure(ex: Throwable) extends SendResult
/** The mail was successfully sent, but storing to db failed.
*/
/** The mail was successfully sent, but storing to db failed. */
case class StoreFailure(ex: Throwable) extends SendResult
/** Something could not be found required for sending (mail configs, items etc).
*/
/** Something could not be found required for sending (mail configs, items etc). */
case object NotFound extends SendResult
}


@@ -41,7 +41,7 @@ object Config {
Decoder.decodeString.emap(fromString)
}
def open: Mode = Mode.Open
def invite: Mode = Mode.Invite
def closed: Mode = Mode.Closed


@@ -15,11 +15,11 @@ sealed trait NewInviteResult { self: Product =>
}
object NewInviteResult {
case class Success(id: Ident) extends NewInviteResult
case object InvitationDisabled extends NewInviteResult
case object PasswordMismatch extends NewInviteResult
def passwordMismatch: NewInviteResult = PasswordMismatch
def invitationClosed: NewInviteResult = InvitationDisabled
def success(id: Ident): NewInviteResult = Success(id)
}


@@ -12,17 +12,17 @@ sealed trait SignupResult {}
object SignupResult {
case object CollectiveExists extends SignupResult
case object InvalidInvitationKey extends SignupResult
case object SignupClosed extends SignupResult
case class Failure(ex: Throwable) extends SignupResult
case object Success extends SignupResult
def collectiveExists: SignupResult = CollectiveExists
def invalidInvitationKey: SignupResult = InvalidInvitationKey
def signupClosed: SignupResult = SignupClosed
def failure(ex: Throwable): SignupResult = Failure(ex)
def success: SignupResult = Success
def fromAddResult(ar: AddResult): SignupResult =
ar match {


@@ -18,7 +18,7 @@ sealed trait AccountSource { self: Product =>
object AccountSource {
case object Local extends AccountSource
case object OpenId extends AccountSource
val all: NonEmptyList[AccountSource] =


@@ -62,10 +62,10 @@ object Binary {
private val utf8Bom: Chunk[Byte] = Chunk(0xef.toByte, 0xbb.toByte, 0xbf.toByte)
def decode[F[_]](charset: Charset): Pipe[F, Byte, String] = {
val decoder = charset.newDecoder
val maxCharsPerByte = math.ceil(decoder.maxCharsPerByte().toDouble).toInt
val avgBytesPerChar = math.ceil(1.0 / decoder.averageCharsPerByte().toDouble).toInt
val charBufferSize = 128
_.repeatPull[String] {
_.unconsN(charBufferSize * avgBytesPerChar, allowFewer = true).flatMap {
@@ -79,9 +79,9 @@ object Binary {
case Some((chunk, stream)) =>
if (chunk.nonEmpty) {
val chunkWithoutBom = skipByteOrderMark(chunk)
val bytes = chunkWithoutBom.toArray
val byteBuffer = ByteBuffer.wrap(bytes)
val charBuffer = CharBuffer.allocate(bytes.length * maxCharsPerByte)
decoder.decode(byteBuffer, charBuffer, false)
val nextStream = stream.consChunk(Chunk.byteBuffer(byteBuffer.slice()))
Pull.output1(charBuffer.flip().toString).as(Some(nextStream))
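
For orientation, the pipe being reformatted above feeds chunks of bytes through a java.nio CharsetDecoder, skipping a leading UTF-8 BOM. A hedged usage sketch (the sample input is made up; Binary.decode is the pipe from this file):

    import java.nio.charset.StandardCharsets

    import cats.effect.IO
    import fs2.Stream

    // Decode a UTF-8 byte stream (with or without BOM) into Strings.
    val input: Stream[IO, Byte] =
      Stream.emits("héllo wörld".getBytes(StandardCharsets.UTF_8))

    val decoded: Stream[IO, String] =
      input.through(Binary.decode[IO](StandardCharsets.UTF_8))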

View File

@ -23,8 +23,7 @@ object CollectiveState {
/** A collective that has been explicitly closed. */
case object Closed extends CollectiveState
/** A collective blocked by a super user, usually some emergency action.
*/
/** A collective blocked by a super user, usually some emergency action. */
case object Blocked extends CollectiveState
def fromString(s: String): Either[String, CollectiveState] =

View File

@ -16,10 +16,10 @@ sealed trait ContactKind { self: Product =>
object ContactKind {
val all = List()
case object Phone extends ContactKind
case object Mobile extends ContactKind
case object Fax extends ContactKind
case object Email extends ContactKind
case object Phone extends ContactKind
case object Mobile extends ContactKind
case object Fax extends ContactKind
case object Email extends ContactKind
case object Website extends ContactKind
def fromString(s: String): Either[String, ContactKind] =

View File

@ -93,11 +93,11 @@ object CustomFieldType {
v.setScale(2, BigDecimal.RoundingMode.HALF_EVEN)
}
def text: CustomFieldType = Text
def text: CustomFieldType = Text
def numeric: CustomFieldType = Numeric
def date: CustomFieldType = Date
def bool: CustomFieldType = Bool
def money: CustomFieldType = Money
def date: CustomFieldType = Date
def bool: CustomFieldType = Bool
def money: CustomFieldType = Money
val all: NonEmptyList[CustomFieldType] =
NonEmptyList.of(Text, Numeric, Date, Bool, Money)

View File

@ -8,9 +8,9 @@ package docspell.common
object DocspellSystem {
val user = Ident.unsafe("docspell-system")
val taskGroup = user
val migrationTaskTracker = Ident.unsafe("full-text-index-tracker")
val allPreviewTaskTracker = Ident.unsafe("generate-all-previews")
val user = Ident.unsafe("docspell-system")
val taskGroup = user
val migrationTaskTracker = Ident.unsafe("full-text-index-tracker")
val allPreviewTaskTracker = Ident.unsafe("generate-all-previews")
val allPageCountTaskTracker = Ident.unsafe("all-page-count-tracker")
}

View File

@ -22,15 +22,15 @@ object EnvMode {
private val envName = "DOCSPELL_ENV"
case object Dev extends EnvMode {
val isDev = true
val isDev = true
val isProd = false
}
case object Prod extends EnvMode {
val isDev = false
val isDev = false
val isProd = true
}
def dev: EnvMode = Dev
def dev: EnvMode = Dev
def prod: EnvMode = Prod
def fromString(s: String): Either[String, EnvMode] =

View File

@ -20,10 +20,10 @@ sealed trait EquipmentUse { self: Product =>
object EquipmentUse {
case object Concerning extends EquipmentUse
case object Disabled extends EquipmentUse
case object Disabled extends EquipmentUse
def concerning: EquipmentUse = Concerning
def disabled: EquipmentUse = Disabled
def disabled: EquipmentUse = Disabled
val all: NonEmptyList[EquipmentUse] =
NonEmptyList.of(concerning, disabled)

View File

@ -43,12 +43,12 @@ object Glob {
}
private val separator = '/'
private val anyChar = '|'
private val anyChar = '|'
val all = new Glob {
def matches(caseSensitive: Boolean)(in: String) = true
def matchFilenameOrPath(in: String) = true
val asString = "*"
def matchFilenameOrPath(in: String) = true
val asString = "*"
}
def pattern(pattern: Pattern): Glob =
@ -142,7 +142,7 @@ object Glob {
}
def startsWith(prefix: String, caseSensitive: Boolean): Boolean = {
val vstr = if (caseSensitive) str else str.toLowerCase
val vstr = if (caseSensitive) str else str.toLowerCase
val vprefix = if (caseSensitive) prefix else prefix.toLowerCase
vstr.startsWith(vprefix)
}
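
A short usage sketch for the Glob.all value defined above (inputs made up):

    // Glob.all matches any input, regardless of case sensitivity.
    val g = Glob.all
    g.matches(caseSensitive = true)("docs/letter.pdf") // true
    g.matchFilenameOrPath("letter.pdf")                // true
    g.asString                                         // "*"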

View File

@ -24,17 +24,17 @@ sealed trait ItemState { self: Product =>
object ItemState {
case object Premature extends ItemState
case object Premature extends ItemState
case object Processing extends ItemState
case object Created extends ItemState
case object Confirmed extends ItemState
case object Deleted extends ItemState
case object Created extends ItemState
case object Confirmed extends ItemState
case object Deleted extends ItemState
def premature: ItemState = Premature
def premature: ItemState = Premature
def processing: ItemState = Processing
def created: ItemState = Created
def confirmed: ItemState = Confirmed
def deleted: ItemState = Deleted
def created: ItemState = Created
def confirmed: ItemState = Confirmed
def deleted: ItemState = Deleted
def fromString(str: String): Either[String, ItemState] =
str.toLowerCase match {

View File

@ -20,8 +20,7 @@ object JobState {
/** Waiting to be executed. */
case object Waiting extends JobState {}
/** A scheduler has picked up this job and will pass it to the next free slot.
*/
/** A scheduler has picked up this job and will pass it to the next free slot. */
case object Scheduled extends JobState {}
/** Is currently executing */
@ -39,17 +38,17 @@ object JobState {
/** Finished with success */
case object Success extends JobState {}
val waiting: JobState = Waiting
val stuck: JobState = Stuck
val waiting: JobState = Waiting
val stuck: JobState = Stuck
val scheduled: JobState = Scheduled
val running: JobState = Running
val failed: JobState = Failed
val running: JobState = Running
val failed: JobState = Failed
val cancelled: JobState = Cancelled
val success: JobState = Success
val success: JobState = Success
val all: NonEmptyList[JobState] =
NonEmptyList.of(Waiting, Scheduled, Running, Stuck, Failed, Cancelled, Success)
val queued: Set[JobState] = Set(Waiting, Scheduled, Stuck)
val queued: Set[JobState] = Set(Waiting, Scheduled, Stuck)
val done: NonEmptyList[JobState] = NonEmptyList.of(Failed, Cancelled, Success)
val notDone: NonEmptyList[JobState] = //all - done
NonEmptyList.of(Waiting, Scheduled, Running, Stuck)

View File

@ -39,7 +39,7 @@ object JvmInfo {
MemoryUsage.createHeap[F].flatMap { mu =>
Sync[F].delay {
val rmb = management.ManagementFactory.getRuntimeMXBean()
val rt = Runtime.getRuntime()
val rt = Runtime.getRuntime()
JvmInfo(
id,
pidHost = rmb.getName(),
@ -84,7 +84,7 @@ object JvmInfo {
def createHeap[F[_]: Sync]: F[MemoryUsage] =
Sync[F].delay {
val mxb = management.ManagementFactory.getMemoryMXBean()
val mxb = management.ManagementFactory.getMemoryMXBean()
val heap = mxb.getHeapMemoryUsage()
MemoryUsage(
init = math.max(0, heap.getInit()),

View File

@ -97,10 +97,10 @@ case class LenientUri(
def asString: String = {
val schemePart = scheme.toList.mkString(":")
val authPart = authority.map(a => s"//$a").getOrElse("")
val pathPart = path.asString
val queryPart = query.map(q => s"?$q").getOrElse("")
val fragPart = fragment.map(f => s"#$f").getOrElse("")
val authPart = authority.map(a => s"//$a").getOrElse("")
val pathPart = path.asString
val queryPart = query.map(q => s"?$q").getOrElse("")
val fragPart = fragment.map(f => s"#$f").getOrElse("")
s"$schemePart:$authPart$pathPart$queryPart$fragPart"
}
}
@ -116,24 +116,24 @@ object LenientUri {
}
case object RootPath extends Path {
val segments = Nil
val isRoot = true
val isEmpty = false
val isRoot = true
val isEmpty = false
def /(seg: String): Path =
NonEmptyPath(NonEmptyList.of(seg))
def asString = "/"
}
case object EmptyPath extends Path {
val segments = Nil
val isRoot = false
val isEmpty = true
val isRoot = false
val isEmpty = true
def /(seg: String): Path =
NonEmptyPath(NonEmptyList.of(seg))
def asString = ""
}
case class NonEmptyPath(segs: NonEmptyList[String]) extends Path {
def segments = segs.toList
val isEmpty = false
val isRoot = false
val isEmpty = false
val isRoot = false
def /(seg: String): Path =
copy(segs = segs.append(seg))
def asString =
@ -215,7 +215,7 @@ object LenientUri {
case -1 =>
Left(s"No scheme found: $str")
case n =>
val scheme = makeScheme(p0.substring(0, n))
val scheme = makeScheme(p0.substring(0, n))
val (path, query, frag) = splitPathQF(p0.substring(n + 1))
scheme match {
case None =>

View File

@ -17,8 +17,8 @@ sealed trait LogLevel { self: Product =>
object LogLevel {
case object Debug extends LogLevel { val toInt = 0 }
case object Info extends LogLevel { val toInt = 1 }
case object Warn extends LogLevel { val toInt = 2 }
case object Info extends LogLevel { val toInt = 1 }
case object Warn extends LogLevel { val toInt = 2 }
case object Error extends LogLevel { val toInt = 3 }
def fromInt(n: Int): LogLevel =

View File

@ -81,8 +81,7 @@ object MetaProposal {
implicit val order: Order[Candidate] =
Order.by(_.ref)
/** This deviates from the standard order to sort None last.
*/
/** This deviates from the standard order to sort None last. */
val weightOrder: Order[Option[Double]] = new Order[Option[Double]] {
def compare(x: Option[Double], y: Option[Double]) =
(x, y) match {
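
The hunk ends before the match arms; for reference, an Order with the documented behavior (None sorted last) could look like this sketch, which is not necessarily the committed body:

    import cats.Order

    val weightOrder: Order[Option[Double]] = new Order[Option[Double]] {
      def compare(x: Option[Double], y: Option[Double]) =
        (x, y) match {
          case (None, None)       => 0
          case (None, Some(_))    => 1  // None sorts after any defined weight
          case (Some(_), None)    => -1
          case (Some(a), Some(b)) => java.lang.Double.compare(a, b)
        }
    }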

View File

@ -20,7 +20,7 @@ import io.circe.generic.semiauto._
*/
case class MetaProposalList private (proposals: List[MetaProposal]) {
def isEmpty: Boolean = proposals.isEmpty
def isEmpty: Boolean = proposals.isEmpty
def nonEmpty: Boolean = proposals.nonEmpty
def hasResults(mt: MetaProposalType, mts: MetaProposalType*): Boolean =
@ -115,7 +115,7 @@ object MetaProposalList {
MetaProposal
) => Map[MetaProposalType, MetaProposal]
): MetaProposalList = {
val init = Map.empty[MetaProposalType, MetaProposal]
val init = Map.empty[MetaProposalType, MetaProposal]
val merged = ml.foldLeft(init)((map, el) => el.proposals.foldLeft(map)(merge))
fromMap(merged)
}

View File

@ -16,12 +16,12 @@ sealed trait MetaProposalType { self: Product =>
object MetaProposalType {
case object CorrOrg extends MetaProposalType
case object CorrOrg extends MetaProposalType
case object CorrPerson extends MetaProposalType
case object ConcPerson extends MetaProposalType
case object ConcEquip extends MetaProposalType
case object DocDate extends MetaProposalType
case object DueDate extends MetaProposalType
case object ConcEquip extends MetaProposalType
case object DocDate extends MetaProposalType
case object DueDate extends MetaProposalType
val all: List[MetaProposalType] =
List(CorrOrg, CorrPerson, ConcPerson, ConcEquip, DocDate, DueDate)

View File

@ -15,8 +15,7 @@ import docspell.common.syntax.all._
import io.circe.{Decoder, Encoder}
/** A MIME type implementation with just enough features for its use here.
*/
/** A MIME type implementation with just enough features for its use here. */
case class MimeType(primary: String, sub: String, params: Map[String, String]) {
def withParam(name: String, value: String): MimeType =
copy(params = params.updated(name, value))
@ -99,13 +98,13 @@ object MimeType {
parse(str).throwLeft
val octetStream = application("octet-stream")
val pdf = application("pdf")
val zip = application("zip")
val png = image("png")
val jpeg = image("jpeg")
val tiff = image("tiff")
val html = text("html")
val plain = text("plain")
val pdf = application("pdf")
val zip = application("zip")
val png = image("png")
val jpeg = image("jpeg")
val tiff = image("tiff")
val html = text("html")
val plain = text("plain")
val emls = NonEmptyList.of(
MimeType("message", "rfc822", Map.empty),
application("mbox")

View File

@ -17,12 +17,12 @@ sealed trait NerTag { self: Product =>
object NerTag {
case object Organization extends NerTag
case object Person extends NerTag
case object Location extends NerTag
case object Misc extends NerTag
case object Email extends NerTag
case object Website extends NerTag
case object Date extends NerTag
case object Person extends NerTag
case object Location extends NerTag
case object Misc extends NerTag
case object Email extends NerTag
case object Website extends NerTag
case object Date extends NerTag
val all: List[NerTag] = List(Organization, Person, Location)

View File

@ -12,10 +12,10 @@ sealed trait NlpMode { self: Product =>
self.productPrefix
}
object NlpMode {
case object Full extends NlpMode
case object Basic extends NlpMode
case object Full extends NlpMode
case object Basic extends NlpMode
case object RegexOnly extends NlpMode
case object Disabled extends NlpMode
case object Disabled extends NlpMode
def fromString(name: String): Either[String, NlpMode] =
name.toLowerCase match {

View File

@ -16,7 +16,7 @@ sealed trait NodeType { self: Product =>
object NodeType {
case object Restserver extends NodeType
case object Joex extends NodeType
case object Joex extends NodeType
def fromString(str: String): Either[String, NodeType] =
str.toLowerCase match {

View File

@ -20,10 +20,10 @@ sealed trait OrgUse { self: Product =>
object OrgUse {
case object Correspondent extends OrgUse
case object Disabled extends OrgUse
case object Disabled extends OrgUse
def correspondent: OrgUse = Correspondent
def disabled: OrgUse = Disabled
def disabled: OrgUse = Disabled
val all: NonEmptyList[OrgUse] =
NonEmptyList.of(correspondent, disabled)

View File

@ -20,13 +20,13 @@ sealed trait PersonUse { self: Product =>
object PersonUse {
case object Correspondent extends PersonUse
case object Concerning extends PersonUse
case object Both extends PersonUse
case object Disabled extends PersonUse
case object Concerning extends PersonUse
case object Both extends PersonUse
case object Disabled extends PersonUse
def concerning: PersonUse = Concerning
def concerning: PersonUse = Concerning
def correspondent: PersonUse = Correspondent
def both: PersonUse = Both
def both: PersonUse = Both
val concerningAndBoth: NonEmptyList[PersonUse] =
NonEmptyList.of(Concerning, Both)

View File

@ -8,8 +8,7 @@ package docspell.common
import scala.concurrent.ExecutionContext
/** Captures thread pools to use in an application.
*/
/** Captures thread pools to use in an application. */
case class Pools(
connectEC: ExecutionContext,
httpClientEC: ExecutionContext,

View File

@ -20,9 +20,9 @@ sealed trait SearchMode { self: Product =>
object SearchMode {
final case object Normal extends SearchMode
final case object Normal extends SearchMode
final case object Trashed extends SearchMode
final case object All extends SearchMode
final case object All extends SearchMode
def fromString(str: String): Either[String, SearchMode] =
str.toLowerCase match {

View File

@ -49,7 +49,7 @@ object SystemCommand {
startProcess(cmd, wd, logger, stdin) { proc =>
Stream.eval {
for {
_ <- writeToProcess(stdin, proc)
_ <- writeToProcess(stdin, proc)
term <- Sync[F].blocking(proc.waitFor(cmd.timeout.seconds, TimeUnit.SECONDS))
_ <-
if (term)
@ -93,7 +93,7 @@ object SystemCommand {
)(
f: Process => Stream[F, A]
): Stream[F, A] = {
val log = logger.debug(s"Running external command: ${cmd.cmdString}")
val log = logger.debug(s"Running external command: ${cmd.cmdString}")
val hasStdin = stdin.take(1).compile.last.map(_.isDefined)
val proc = log *> hasStdin.flatMap(flag =>
Sync[F].blocking {

View File

@ -32,7 +32,7 @@ object ThreadFactories {
def ofNameFJ(prefix: String): ForkJoinWorkerThreadFactory =
new ForkJoinWorkerThreadFactory {
val tf = ForkJoinPool.defaultForkJoinWorkerThreadFactory
val tf = ForkJoinPool.defaultForkJoinWorkerThreadFactory
val counter = new AtomicLong(0)
def newThread(pool: ForkJoinPool): ForkJoinWorkerThread = {

View File

@ -27,7 +27,7 @@ trait StreamSyntax {
optStr
.map(_.trim)
.toRight(new Exception("Empty string cannot be parsed into a value"))
json <- parse(str).leftMap(_.underlying)
json <- parse(str).leftMap(_.underlying)
value <- json.as[A]
} yield value
)

View File

@ -20,7 +20,7 @@ trait StringSyntax {
def parseJsonAs[A](implicit d: Decoder[A]): Either[Throwable, A] =
for {
json <- parse(s).leftMap(_.underlying)
json <- parse(s).leftMap(_.underlying)
value <- json.as[A]
} yield value
}
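
A hedged usage sketch for the string syntax above (Person and its decoder are invented for illustration; the import path appears elsewhere in this commit):

    import docspell.common.syntax.all._

    import io.circe.Decoder

    case class Person(name: String)
    object Person {
      implicit val decoder: Decoder[Person] =
        Decoder.forProduct1("name")(Person.apply)
    }

    // Right(Person("Eike")) on success, Left(parse/decode error) otherwise.
    val parsed: Either[Throwable, Person] =
      """{"name": "Eike"}""".parseJsonAs[Person]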

View File

@ -109,12 +109,12 @@ object Conversion {
})
object Office {
val odt = MimeType.application("vnd.oasis.opendocument.text")
val ods = MimeType.application("vnd.oasis.opendocument.spreadsheet")
val odt = MimeType.application("vnd.oasis.opendocument.text")
val ods = MimeType.application("vnd.oasis.opendocument.spreadsheet")
val odtAlias = MimeType.application("x-vnd.oasis.opendocument.text")
val odsAlias = MimeType.application("x-vnd.oasis.opendocument.spreadsheet")
val msoffice = MimeType.application("x-tika-msoffice")
val ooxml = MimeType.application("x-tika-ooxml")
val ooxml = MimeType.application("x-tika-ooxml")
val docx =
MimeType.application("vnd.openxmlformats-officedocument.wordprocessingml.document")
val xlsx =

View File

@ -29,7 +29,7 @@ private[extern] object ExternConv {
.resource(File.withTempDir[F](wd, s"docspell-$name"))
.flatMap { dir =>
val inFile = dir.resolve("infile").absolute.normalize
val out = dir.resolve("out.pdf").absolute.normalize
val out = dir.resolve("out.pdf").absolute.normalize
val sysCfg =
cmdCfg.replace(
Map(

View File

@ -35,14 +35,14 @@ object Markdown {
val r = createRenderer()
Try {
val reader = new InputStreamReader(is, cs)
val doc = p.parseReader(reader)
val doc = p.parseReader(reader)
wrapHtml(r.render(doc), cfg)
}.toEither
}
def toHtml(md: String, cfg: MarkdownConfig): String = {
val p = createParser()
val r = createRenderer()
val p = createParser()
val r = createRenderer()
val doc = p.parse(md)
wrapHtml(r.render(doc), cfg)
}

View File

@ -172,7 +172,7 @@ class ConversionTest extends FunSuite with FileChecks {
.covary[IO]
.zipWithIndex
.evalMap { case (uri, index) =>
val load = uri.readURL[IO](8192)
val load = uri.readURL[IO](8192)
val dataType = DataType.filename(uri.path.segments.last)
logger.info(s"Processing file ${uri.path.asString}") *>
conv.toPDF(dataType, Language.German, handler(index))(load)

View File

@ -52,7 +52,7 @@ trait FileChecks {
case ConversionResult.SuccessPdfTxt(pdf, txt) =>
for {
pout <- pdf.through(storeFile(filePdf)).compile.lastOrError
str <- txt
str <- txt
tout <- IO(Files.write(fileTxt.toNioPath, str.getBytes(StandardCharsets.UTF_8)))
} yield (pout, File.path(tout))

View File

@ -20,7 +20,7 @@ import docspell.files.ExampleFiles
import munit._
class ExternConvTest extends FunSuite with FileChecks {
val utf8 = StandardCharsets.UTF_8
val utf8 = StandardCharsets.UTF_8
val logger = Logger.log4s[IO](org.log4s.getLogger)
val target = File.path(Paths.get("target"))

View File

@ -20,14 +20,14 @@ object ExtractResult {
case class UnsupportedFormat(mime: MimeType) extends ExtractResult {
val textOption = None
val pdfMeta = None
val pdfMeta = None
}
def unsupportedFormat(mt: MimeType): ExtractResult =
UnsupportedFormat(mt)
case class Failure(ex: Throwable) extends ExtractResult {
val textOption = None
val pdfMeta = None
val pdfMeta = None
}
def failure(ex: Throwable): ExtractResult =
Failure(ex)

View File

@ -14,8 +14,7 @@ import docspell.common._
object Ocr {
/** Extract the text of all pages in the given pdf file.
*/
/** Extract the text of all pages in the given pdf file. */
def extractPdf[F[_]: Async](
pdf: Stream[F, Byte],
logger: Logger[F],
@ -30,8 +29,7 @@ object Ocr {
.last
}
/** Extract the text from the given image file.
*/
/** Extract the text from the given image file. */
def extractImage[F[_]: Async](
img: Stream[F, Byte],
logger: Logger[F],
@ -79,7 +77,7 @@ object Ocr {
.copy(args = xargs)
.replace(
Map(
"{{infile}}" -> "-",
"{{infile}}" -> "-",
"{{outfile}}" -> "%d.tif"
)
)
@ -99,7 +97,7 @@ object Ocr {
): Stream[F, Path] = {
val cmd = ghostscript.replace(
Map(
"{{infile}}" -> pdf.absolute.toString,
"{{infile}}" -> pdf.absolute.toString,
"{{outfile}}" -> "%d.tif"
)
)
@ -123,7 +121,7 @@ object Ocr {
val targetFile = img.resolveSibling("u-" + img.fileName.toString).absolute
val cmd = unpaper.replace(
Map(
"{{infile}}" -> img.absolute.toString,
"{{infile}}" -> img.absolute.toString,
"{{outfile}}" -> targetFile.toString
)
)
@ -139,8 +137,7 @@ object Ocr {
}
}
/** Run tesseract on the given image file and return the extracted text.
*/
/** Run tesseract on the given image file and return the extracted text. */
private[extract] def runTesseractFile[F[_]: Async](
img: Path,
logger: Logger[F],
@ -159,8 +156,7 @@ object Ocr {
.map(_.stdout)
}
/** Run tesseract on the given image file and return the extracted text.
*/
/** Run tesseract on the given image file and return the extracted text. */
private[extract] def runTesseractStdin[F[_]: Async](
img: Stream[F, Byte],
logger: Logger[F],

View File

@ -11,9 +11,9 @@ import docspell.common.MimeType
object OcrType {
val jpeg = MimeType.jpeg
val png = MimeType.png
val png = MimeType.png
val tiff = MimeType.tiff
val pdf = MimeType.pdf
val pdf = MimeType.pdf
val all = Set(jpeg, png, tiff, pdf)

View File

@ -28,9 +28,9 @@ object OdfExtract {
def get(is: InputStream) =
Try {
val handler = new BodyContentHandler()
val pctx = new ParseContext()
val meta = new Metadata()
val handler = new BodyContentHandler()
val pctx = new ParseContext()
val meta = new Metadata()
val ooparser = new OpenDocumentParser()
ooparser.parse(is, handler, meta, pctx)
Text(Option(handler.toString))

View File

@ -10,8 +10,8 @@ import docspell.common.MimeType
object OdfType {
val odt = MimeType.application("vnd.oasis.opendocument.text")
val ods = MimeType.application("vnd.oasis.opendocument.spreadsheet")
val odt = MimeType.application("vnd.oasis.opendocument.text")
val ods = MimeType.application("vnd.oasis.opendocument.spreadsheet")
val odtAlias = MimeType.application("x-vnd.oasis.opendocument.text")
val odsAlias = MimeType.application("x-vnd.oasis.opendocument.spreadsheet")

View File

@ -30,7 +30,7 @@ object PdfboxExtract {
.withDocumentStream(data) { doc =>
(for {
txt <- readText(doc)
md <- readMetaData(doc)
md <- readMetaData(doc)
} yield (txt, Some(md).filter(_.nonEmpty))).pure[F]
}
.attempt

View File

@ -11,12 +11,12 @@ import docspell.common.MimeType
object PoiType {
val msoffice = MimeType.application("x-tika-msoffice")
val ooxml = MimeType.application("x-tika-ooxml")
val ooxml = MimeType.application("x-tika-ooxml")
val docx =
MimeType.application("vnd.openxmlformats-officedocument.wordprocessingml.document")
val xlsx = MimeType.application("vnd.openxmlformats-officedocument.spreadsheetml.sheet")
val xls = MimeType.application("vnd.ms-excel")
val doc = MimeType.application("msword")
val xls = MimeType.application("vnd.ms-excel")
val doc = MimeType.application("msword")
val all = Set(msoffice, ooxml, docx, xlsx, xls, doc)

View File

@ -22,7 +22,7 @@ class OdfExtractTest extends FunSuite {
test("test extract from odt") {
files.foreach { case (file, len) =>
val is = file.toJavaUrl.map(_.openStream()).fold(sys.error, identity)
val is = file.toJavaUrl.map(_.openStream()).fold(sys.error, identity)
val str1 = OdfExtract.get(is).fold(throw _, identity)
assertEquals(str1.length, len)

View File

@ -22,20 +22,20 @@ class PdfboxExtractTest extends FunSuite {
test("extract text from text PDFs by inputstream") {
textPDFs.foreach { case (file, txt) =>
val url = file.toJavaUrl.fold(sys.error, identity)
val str = PdfboxExtract.getText(url.openStream()).fold(throw _, identity)
val url = file.toJavaUrl.fold(sys.error, identity)
val str = PdfboxExtract.getText(url.openStream()).fold(throw _, identity)
val received = removeFormatting(str.value)
val expect = removeFormatting(txt)
val expect = removeFormatting(txt)
assertEquals(received, expect)
}
}
test("extract text from text PDFs via Stream") {
textPDFs.foreach { case (file, txt) =>
val data = file.readURL[IO](8192)
val str = PdfboxExtract.getText(data).unsafeRunSync().fold(throw _, identity)
val data = file.readURL[IO](8192)
val str = PdfboxExtract.getText(data).unsafeRunSync().fold(throw _, identity)
val received = removeFormatting(str.value)
val expect = removeFormatting(txt)
val expect = removeFormatting(txt)
assertEquals(received, expect)
}
}

View File

@ -17,10 +17,10 @@ import munit._
class PoiExtractTest extends FunSuite {
val officeFiles = List(
ExampleFiles.examples_sample_doc -> 6241,
ExampleFiles.examples_sample_doc -> 6241,
ExampleFiles.examples_sample_docx -> 6179,
ExampleFiles.examples_sample_xlsx -> 660,
ExampleFiles.examples_sample_xls -> 660
ExampleFiles.examples_sample_xls -> 660
)
test("extract text from ms office files") {

View File

@ -14,8 +14,8 @@ class RtfExtractTest extends FunSuite {
test("extract text from rtf using java input-stream") {
val file = ExampleFiles.examples_sample_rtf
val is = file.toJavaUrl.map(_.openStream()).fold(sys.error, identity)
val str = RtfExtract.get(is).fold(throw _, identity)
val is = file.toJavaUrl.map(_.openStream()).fold(sys.error, identity)
val str = RtfExtract.get(is).fold(throw _, identity)
assertEquals(str.length, 7342)
}
}

View File

@ -56,7 +56,7 @@ object ImageSize {
): Either[Throwable, Dimension] =
Try {
reader.setInput(in)
val width = reader.getWidth(reader.getMinIndex)
val width = reader.getWidth(reader.getMinIndex)
val height = reader.getHeight(reader.getMinIndex)
Dimension(width, height)
}.toEither

View File

@ -31,9 +31,9 @@ object TikaMimetype {
private def convert(mt: MediaType): MimeType =
Option(mt) match {
case Some(_) =>
val params = mt.getParameters.asScala.toMap
val params = mt.getParameters.asScala.toMap
val primary = mt.getType
val sub = mt.getSubtype
val sub = mt.getSubtype
normalize(MimeType(primary, sub, params))
case None =>
MimeType.octetStream

View File

@ -22,11 +22,11 @@ class ImageSizeTest extends FunSuite {
ExampleFiles.camera_letter_en_jpg -> Dimension(1695, 2378),
ExampleFiles.camera_letter_en_png -> Dimension(1695, 2378),
// ExampleFiles.camera_letter_en_tiff -> Dimension(1695, 2378),
ExampleFiles.scanner_jfif_jpg -> Dimension(2480, 3514),
ExampleFiles.scanner_jfif_jpg -> Dimension(2480, 3514),
ExampleFiles.bombs_20K_gray_jpeg -> Dimension(20000, 20000),
ExampleFiles.bombs_20K_gray_png -> Dimension(20000, 20000),
ExampleFiles.bombs_20K_rgb_jpeg -> Dimension(20000, 20000),
ExampleFiles.bombs_20K_rgb_png -> Dimension(20000, 20000)
ExampleFiles.bombs_20K_gray_png -> Dimension(20000, 20000),
ExampleFiles.bombs_20K_rgb_jpeg -> Dimension(20000, 20000),
ExampleFiles.bombs_20K_rgb_png -> Dimension(20000, 20000)
)
test("get sizes from input-stream") {
@ -42,7 +42,7 @@ class ImageSizeTest extends FunSuite {
test("get sizes from stream") {
files.foreach { case (uri, expect) =>
val stream = uri.readURL[IO](8192)
val dim = ImageSize.get(stream).unsafeRunSync()
val dim = ImageSize.get(stream).unsafeRunSync()
assertEquals(dim, expect.some)
}
}

View File

@ -18,7 +18,7 @@ class ZipTest extends FunSuite {
test("unzip") {
val zipFile = ExampleFiles.letters_zip.readURL[IO](8192)
val uncomp = zipFile.through(Zip.unzip(8192, Glob.all))
val uncomp = zipFile.through(Zip.unzip(8192, Glob.all))
uncomp
.evalMap { entry =>

View File

@ -28,12 +28,12 @@ object FtsMigration {
sealed trait Result
object Result {
case object WorkDone extends Result
case object WorkDone extends Result
case object ReIndexAll extends Result
case object IndexAll extends Result
case object IndexAll extends Result
def workDone: Result = WorkDone
def workDone: Result = WorkDone
def reIndexAll: Result = ReIndexAll
def indexAll: Result = IndexAll
def indexAll: Result = IndexAll
}
}

View File

@ -24,7 +24,7 @@ object FtsResult {
sealed trait MatchData
case class AttachmentData(attachId: Ident, attachName: String) extends MatchData
case object ItemData extends MatchData
case object ItemData extends MatchData
case class ItemMatch(
id: Ident,

View File

@ -20,19 +20,19 @@ object Field {
def apply(name: String): Field =
new Field(name)
val id = Field("id")
val itemId = Field("itemId")
val collectiveId = Field("collectiveId")
val attachmentId = Field("attachmentId")
val discriminator = Field("discriminator")
val id = Field("id")
val itemId = Field("itemId")
val collectiveId = Field("collectiveId")
val attachmentId = Field("attachmentId")
val discriminator = Field("discriminator")
val attachmentName = Field("attachmentName")
val content = Field("content")
val content_de = contentField(Language.German)
val content_en = contentField(Language.English)
val content_fr = contentField(Language.French)
val itemName = Field("itemName")
val itemNotes = Field("itemNotes")
val folderId = Field("folder")
val content = Field("content")
val content_de = contentField(Language.German)
val content_en = contentField(Language.English)
val content_fr = contentField(Language.French)
val itemName = Field("itemName")
val itemNotes = Field("itemNotes")
val folderId = Field("folder")
val contentLangFields = Language.all
.map(contentField)

View File

@ -77,7 +77,7 @@ trait JsonCodec {
new Decoder[VersionDoc] {
final def apply(c: HCursor): Decoder.Result[VersionDoc] =
for {
id <- c.get[String](VersionDoc.Fields.id.name)
id <- c.get[String](VersionDoc.Fields.id.name)
version <- c.get[Int](VersionDoc.Fields.currentVersion.name)
} yield VersionDoc(id, version)
}
@ -106,10 +106,10 @@ trait JsonCodec {
new Decoder[FtsResult] {
final def apply(c: HCursor): Decoder.Result[FtsResult] =
for {
qtime <- c.downField("responseHeader").get[Duration]("QTime")
count <- c.downField("response").get[Int]("numFound")
maxScore <- c.downField("response").get[Double]("maxScore")
results <- c.downField("response").get[List[FtsResult.ItemMatch]]("docs")
qtime <- c.downField("responseHeader").get[Duration]("QTime")
count <- c.downField("response").get[Int]("numFound")
maxScore <- c.downField("response").get[Double]("maxScore")
results <- c.downField("response").get[List[FtsResult.ItemMatch]]("docs")
highlightng <- c.get[Map[Ident, Map[String, List[String]]]]("highlighting")
highlight = highlightng.map(kv => kv._1 -> kv._2.values.flatten.toList)
} yield FtsResult(qtime, count, maxScore, highlight, results)
@ -120,10 +120,10 @@ trait JsonCodec {
final def apply(c: HCursor): Decoder.Result[FtsResult.ItemMatch] =
for {
itemId <- c.get[Ident](Field.itemId.name)
id <- c.get[Ident](Field.id.name)
coll <- c.get[Ident](Field.collectiveId.name)
score <- c.get[Double]("score")
md <- decodeMatchData(c)
id <- c.get[Ident](Field.id.name)
coll <- c.get[Ident](Field.collectiveId.name)
score <- c.get[Double]("score")
md <- decodeMatchData(c)
} yield FtsResult.ItemMatch(id, itemId, coll, score, md)
}
@ -135,7 +135,7 @@ trait JsonCodec {
md <-
if ("attachment" == disc)
for {
aId <- c.get[Ident](Field.attachmentId.name)
aId <- c.get[Ident](Field.attachmentId.name)
aName <- c.get[String](Field.attachmentName.name)
} yield FtsResult.AttachmentData(aId, aName)
else Right(FtsResult.ItemData)

View File

@ -26,11 +26,11 @@ final case class QueryData(
def withHighLight(fields: List[Field], pre: String, post: String): QueryData =
copy(params =
params ++ Map(
"hl" -> "on",
"hl" -> "on",
"hl.requireFieldMatch" -> "true",
"hl.fl" -> fields.map(_.name).mkString(","),
"hl.simple.pre" -> pre,
"hl.simple.post" -> post
"hl.fl" -> fields.map(_.name).mkString(","),
"hl.simple.pre" -> pre,
"hl.simple.post" -> post
)
)
}
@ -46,9 +46,9 @@ object QueryData {
fields: List[Field],
fq: FtsQuery
): QueryData = {
val q = sanitize(fq.q)
val extQ = search.map(f => s"${f.name}:($q)").mkString(" OR ")
val items = fq.items.map(_.id).mkString(" ")
val q = sanitize(fq.q)
val extQ = search.map(f => s"${f.name}:($q)").mkString(" OR ")
val items = fq.items.map(_.id).mkString(" ")
val folders = fq.folders.map(_.id).mkString(" ")
val filterQ = List(
s"""${Field.collectiveId.name}:"${fq.collective.id}"""",

View File

@ -53,9 +53,9 @@ final class SolrFtsClient[F[_]: Async](
f: List[TextData] => F[Unit]
): F[Unit] =
(for {
_ <- Stream.eval(logger.debug("Updating SOLR index"))
_ <- Stream.eval(logger.debug("Updating SOLR index"))
chunks <- data.chunks
res <- Stream.eval(f(chunks.toList).attempt)
res <- Stream.eval(f(chunks.toList).attempt)
_ <- res match {
case Right(()) => Stream.emit(())
case Left(ex) =>

View File

@ -69,7 +69,7 @@ object SolrQuery {
Field("current_version_i")
)
val query = QueryData(s"id:$id", "", 1, 0, fields, Map.empty)
val req = Method.POST(query.asJson, url)
val req = Method.POST(query.asJson, url)
client.expect[Option[VersionDoc]](req)
}
}

View File

@ -60,8 +60,8 @@ object SolrSetup {
val verDoc = VersionDoc(versionDocId, allMigrations.map(_.value.version).max)
val solrUp = SolrUpdate(cfg, client)
val writeVersion = SolrMigration.writeVersion(solrUp, verDoc)
val deleteAll = SolrMigration.deleteData(0, solrUp)
val indexAll = SolrMigration.indexAll[F](Int.MaxValue, "Index all data")
val deleteAll = SolrMigration.deleteData(0, solrUp)
val indexAll = SolrMigration.indexAll[F](Int.MaxValue, "Index all data")
deleteAll :: (allMigrations
.filter(_.isSchemaChange) ::: List(indexAll, writeVersion))

View File

@ -79,7 +79,7 @@ object SolrUpdate {
for {
docIds <- client.expect[DocIdResult](searchReq)
sets = docIds.toSetFolder(folder)
req = Method.POST(sets.asJson, url)
req = Method.POST(sets.asJson, url)
_ <- client.expect[Unit](req)
} yield ()
}

Some files were not shown because too many files have changed in this diff.