mirror of https://github.com/TheAnachronism/docspell.git (synced 2025-04-05 10:59:33 +00:00)
commit 05843876cc
@@ -1,4 +1,4 @@
-version = "3.0.6"
+version = "3.0.8"
 preset = default
 align.preset = some
@@ -14,7 +14,7 @@ def inTest(d0: Seq[ModuleID], ds: Seq[ModuleID]*) =
 
 val scalafixSettings = Seq(
   semanticdbEnabled := true, // enable SemanticDB
-  semanticdbVersion := scalafixSemanticdb.revision, //"4.4.0"
+  semanticdbVersion := scalafixSemanticdb.revision, // "4.4.0"
   ThisBuild / scalafixDependencies ++= Dependencies.organizeImports
 )
@@ -781,7 +781,7 @@ val restserver = project
     Universal / mappings := {
       val allMappings = (Universal / mappings).value
       allMappings.filter {
-        //scalajs artifacts are not needed at runtime
+        // scalajs artifacts are not needed at runtime
         case (file, name) => !name.contains("_sjs1_")
       }
     }
@@ -53,8 +53,8 @@ object Properties {
       "ner.statisticalOnly" -> "true",
       "ner.rulesOnly" -> "false",
       "ner.applyFineGrained" -> "false",
-      "ner.applyNumericClassifiers" -> "false", //only english supported, not needed currently
-      "ner.useSUTime" -> "false", //only english, unused in docspell
+      "ner.applyNumericClassifiers" -> "false", // only english supported, not needed currently
+      "ner.useSUTime" -> "false", // only english, unused in docspell
       "ner.language" -> "de",
       "ner.model" -> "edu/stanford/nlp/models/ner/german.distsim.crf.ser.gz,edu/stanford/nlp/models/ner/english.conll.4class.distsim.crf.ser.gz"
     ).withRegexNer(regexNerMappingFile).withHighRecall(highRecall)
@@ -50,7 +50,7 @@ object JobState {
     NonEmptyList.of(Waiting, Scheduled, Running, Stuck, Failed, Cancelled, Success)
   val queued: Set[JobState] = Set(Waiting, Scheduled, Stuck)
   val done: NonEmptyList[JobState] = NonEmptyList.of(Failed, Cancelled, Success)
-  val notDone: NonEmptyList[JobState] = //all - done
+  val notDone: NonEmptyList[JobState] = // all - done
     NonEmptyList.of(Waiting, Scheduled, Running, Stuck)
   val inProgress: Set[JobState] = Set(Scheduled, Running, Stuck)
 
@@ -168,7 +168,7 @@ object MimeType {
       left
   }
 
-  //https://datatracker.ietf.org/doc/html/rfc7230#section-3.2.6
+  // https://datatracker.ietf.org/doc/html/rfc7230#section-3.2.6
   private def isToken(s: String): Boolean =
     s.nonEmpty && s.forall(c => c.isLetterOrDigit || tokenExtraChars.contains(c))
@@ -186,7 +186,7 @@ object MimeType {
     seq(primary, sub)((p, s) => MimeType(p.toLowerCase, s.toLowerCase, None))
   }
 
-  //https://datatracker.ietf.org/doc/html/rfc2046#section-4.1.2
+  // https://datatracker.ietf.org/doc/html/rfc2046#section-4.1.2
   private val charset: P[Option[Charset]] = in =>
     in.trim.toLowerCase.indexOf("charset=") match {
       case -1 => Right((None, in))
@@ -44,7 +44,7 @@ object PdfExtract {
           s"Using stripped text (not OCR), as it is longer (${strippedRes._1.length} > ${ocrStr.length})"
         ) *> Result(strippedRes).pure[F]
 
-    //maybe better: inspect the pdf and decide whether ocr or not
+    // maybe better: inspect the pdf and decide whether ocr or not
     for {
       pdfboxRes <-
         logger.debug("Trying to strip text from pdf using pdfbox.") *>
@@ -16,8 +16,8 @@ import munit._
 
 class ImageSizeTest extends FunSuite {
 
-  //tiff files are not supported on the jdk by default
-  //requires an external library
+  // tiff files are not supported on the jdk by default
+  // requires an external library
   val files = List(
     ExampleFiles.camera_letter_en_jpg -> Dimension(1695, 2378),
     ExampleFiles.camera_letter_en_png -> Dimension(1695, 2378),
@@ -15,8 +15,8 @@ object Playing extends IOApp {
 
   def run(args: List[String]): IO[ExitCode] =
     IO {
-      //val ods = ExampleFiles.examples_sample_ods.readURL[IO](8192, blocker)
-      //val odt = ExampleFiles.examples_sample_odt.readURL[IO](8192, blocker)
+      // val ods = ExampleFiles.examples_sample_ods.readURL[IO](8192, blocker)
+      // val odt = ExampleFiles.examples_sample_odt.readURL[IO](8192, blocker)
       val rtf = ExampleFiles.examples_sample_rtf.readURL[IO](8192)
 
       val x = for {
@@ -46,7 +46,7 @@ object RegexNerFile {
   final private class Impl[F[_]: Async](
       cfg: Config,
       store: Store[F],
-      writer: Semaphore[F] //TODO allow parallelism per collective
+      writer: Semaphore[F] // TODO allow parallelism per collective
   ) extends RegexNerFile[F] {
 
     def makeFile(collective: Ident): F[Option[Path]] =
@@ -143,7 +143,7 @@ object CreateItem {
         .fromOption[F](NonEmptyList.fromList(fileMetaIds.toList))
         .flatMap(fids =>
           OptionT(
-            //load attachments but only those mentioned in the task's arguments
+            // load attachments but only those mentioned in the task's arguments
            cand.headOption.traverse(ri =>
              ctx.store
                .transact(RAttachment.findByItemCollectiveSource(ri.id, ri.cid, fids))
@@ -195,7 +195,7 @@ object CreateItem {
       ctx.logger.error(msg) *> Sync[F].raiseError(new Exception(msg))
   }
 
-  //TODO if no source is present, it must be saved!
+  // TODO if no source is present, it must be saved!
   private def originFileTuple(
       t: (RAttachment, Option[RAttachmentSource])
   ): (Ident, Ident) =
@@ -60,7 +60,7 @@ object EvalProposals {
   ): Double =
     mp.proposalType match {
       case MetaProposalType.DueDate =>
-        //for due dates, sort earliest on top
+        // for due dates, sort earliest on top
         MetaProposal
           .parseDate(cand)
           .map { ld =>
@@ -108,9 +108,9 @@ object ReProcessItem {
       data.item.cid,
       args.itemId.some,
       lang,
-      None, //direction
-      data.item.source, //source-id
-      None, //folder
+      None, // direction
+      data.item.source, // source-id
+      None, // folder
       Seq.empty,
       false,
       None,
@@ -162,7 +162,7 @@ object ScanMailboxTask {
 
   def requireFolder[C](a: Access[F, C])(name: String): MailOp[F, C, MailFolder] =
     if ("INBOX".equalsIgnoreCase(name)) a.getInbox
-    else //TODO resolve sub-folders
+    else // TODO resolve sub-folders
       a.findFolder(None, name)
         .map(_.toRight(new Exception(s"Folder '$name' not found")))
         .mapF(_.rethrow)
@@ -227,7 +227,7 @@ final class SchedulerImpl[F[_]: Async](
         job.id,
         config.name,
         store
-      ) //also increments retries if current state=stuck
+      ) // also increments retries if current state=stuck
 
   def wrapTask(
       job: RJob,
@@ -26,7 +26,7 @@ class NerFileTest extends FunSuite {
 
     for ((name, first) <- names) {
       val ps = Pattern(1)(name).distinct
-      //check if it compiles to a regex pattern
+      // check if it compiles to a regex pattern
      ps.flatMap(_.value.split("\\s+").toList).foreach(_.r)
      ps.foreach(_.value.r)
 
@@ -59,7 +59,7 @@ class JsonMiniQueryTest extends FunSuite with Fixtures {
     assertEquals(r(sampleEvent), Vector.empty)
   }
 
-  //content.[added,removed].(category=expense & name=grocery)
+  // content.[added,removed].(category=expense & name=grocery)
   test("combine fields and filter") {
     val andOk = JQ.at("content").at("added", "removed") >>
       (JQ.at("name").is("grocery") && JQ.at("category").is("expense"))
@@ -156,6 +156,6 @@ class JsonMiniQueryTest extends FunSuite with Fixtures {
     val json4 = parseJson(
       """[{"name":"max", "count":4}, {"name":"me", "count": 3}, {"name":"max", "count": 3}]"""
     )
     println(q4(json4))
     assertEquals(q4(json4), values("max", "max"))
   }
 }
@@ -80,7 +80,7 @@ final class NaivePubSub[F[_]: Async](
     } yield head
 
   def publish(topic: Topic): Pipe[F, Json, MessageHead] =
-    ms => //TODO Do some optimization by grouping messages to the same topic
+    ms => // TODO Do some optimization by grouping messages to the same topic
       ms.evalMap(publish0(topic, _))
 
   def subscribe(topics: NonEmptyList[Topic]): Stream[F, Message[Json]] =
@@ -58,7 +58,7 @@ class NaivePubSubTest extends CatsEffectSuite with Fixtures {
       res <- subscribe(ps, Topics.jobSubmitted)
       (received, halt, subFiber) = res
       _ <- ps.publish1(otherTopic, JobSubmittedMsg("hello".id))
-      _ <- IO.sleep(100.millis) //allow some time for receiving
+      _ <- IO.sleep(100.millis) // allow some time for receiving
       _ <- halt.set(true)
       outcome <- subFiber.join
       _ = assert(outcome.isSuccess)
@@ -113,7 +113,7 @@ object ExprString {
       case Expr.NamesMacro(name) =>
         s"${C.names}:${quote(name)}"
       case Expr.YearMacro(_, year) =>
-        s"${C.year}:$year" //currently, only for Attr.Date
+        s"${C.year}:$year" // currently, only for Attr.Date
       case Expr.ConcMacro(term) =>
         s"${C.conc}:${quote(term)}"
      case Expr.CorrMacro(term) =>
@@ -236,9 +236,9 @@ trait Conversions {
       i.concPerson.map(mkIdName),
       i.concEquip.map(mkIdName),
       i.folder.map(mkIdName),
-      Nil, //attachments
-      Nil, //tags
-      Nil, //customfields
+      Nil, // attachments
+      Nil, // tags
+      Nil, // customfields
       i.notes,
       Nil // highlight
     )
@@ -141,7 +141,7 @@ object ContentDisposition {
   }
 
   private val parser = makeParser(mimeValue)
-  //private val origParser = makeParser(Rfc7230.token | Rfc7230.quotedString)
+  // private val origParser = makeParser(Rfc7230.token | Rfc7230.quotedString)
 
   implicit val headerInstance: Header[ContentDisposition, Header.Single] = {
     val oh = `Content-Disposition`.headerInstance
@@ -140,9 +140,9 @@ object IntegrationEndpointRoutes {
       cfg: Config.IntegrationEndpoint.AllowedIps
   ): HttpRoutes[F] =
     HttpRoutes { req =>
-      //The `req.from' take the X-Forwarded-For header into account,
-      //which is not desirable here. The `http-header' auth config
-      //can be used to authenticate based on headers.
+      // The `req.from' take the X-Forwarded-For header into account,
+      // which is not desirable here. The `http-header' auth config
+      // can be used to authenticate based on headers.
       val from = req.remote.map(_.host)
       if (from.exists(cfg.containsAddress)) OptionT.none[F, Response[F]]
       else OptionT.pure(Responses.forbidden[F])
@@ -81,7 +81,7 @@ trait MigrationTasks {
 
   def mkTransactor(ctx: Context): Transactor[IO] = {
     val xa = Transactor.fromConnection[IO](ctx.getConnection())
-    Transactor.strategy.set(xa, Strategy.void) //transactions are handled by flyway
+    Transactor.strategy.set(xa, Strategy.void) // transactions are handled by flyway
   }
 
 }
@@ -61,7 +61,7 @@ object QJob {
       retryPause: Duration,
       currentTry: Int
   ): F[Either[Unit, Option[RJob]]] = {
-    //if this fails, we have to restart takeNextJob
+    // if this fails, we have to restart takeNextJob
     def markJob(job: RJob): F[Either[Unit, RJob]] =
       store.transact(for {
         n <- RJob.setScheduled(job.id, worker)
@@ -236,7 +236,7 @@ object QJob {
     val JC = RJob.T
     val waiting = NonEmptyList.of(JobState.Waiting, JobState.Stuck, JobState.Scheduled)
     val running = NonEmptyList.of(JobState.Running)
-    //val done = JobState.all.filterNot(js => ).diff(waiting).diff(running)
+    // val done = JobState.all.filterNot(js => ).diff(waiting).diff(running)
 
     def selectJobs(now: Timestamp): Stream[ConnectionIO, RJob] = {
       val refDate = now.minusHours(24)
@@ -20,7 +20,7 @@ import doobie.implicits._
   * a 0..1-1 relationship.
   */
 case class RAttachmentArchive(
-    id: Ident, //same as RAttachment.id
+    id: Ident, // same as RAttachment.id
     fileId: Ident,
     name: Option[String],
     messageId: Option[String],
@@ -17,7 +17,7 @@ import doobie._
 import doobie.implicits._
 
 case class RAttachmentMeta(
-    id: Ident, //same as RAttachment.id
+    id: Ident, // same as RAttachment.id
     content: Option[String],
     nerlabels: List[NerLabel],
     proposals: MetaProposalList,
@@ -19,7 +19,7 @@ import doobie.implicits._
   * 1-1 (or 0..1-1) relationship.
   */
 case class RAttachmentPreview(
-    id: Ident, //same as RAttachment.id
+    id: Ident, // same as RAttachment.id
     fileId: Ident,
     name: Option[String],
     created: Timestamp
@@ -19,7 +19,7 @@ import doobie.implicits._
   * 1-1 (or 0..1-1) relationship.
   */
 case class RAttachmentSource(
-    id: Ident, //same as RAttachment.id
+    id: Ident, // same as RAttachment.id
     fileId: Ident,
     name: Option[String],
     created: Timestamp
@@ -120,7 +120,7 @@ object RCollective {
       cs.listType.s,
       es.schedule.s,
       es.minAge.s,
-      const(0) //dummy value to load Nil as list of passwords
+      const(0) // dummy value to load Nil as list of passwords
     ),
     from(c).leftJoin(cs, cs.cid === c.id).leftJoin(es, es.cid === c.id),
     c.id === coll
@@ -15,7 +15,7 @@ object Dependencies {
   val DoobieVersion = "1.0.0-RC1"
   val EmilVersion = "0.10.0-M3"
   val FlexmarkVersion = "0.62.2"
-  val FlywayVersion = "8.2.0"
+  val FlywayVersion = "8.2.1"
   val Fs2Version = "3.2.3"
   val Fs2CronVersion = "0.7.1"
   val H2Version = "1.4.200"
@@ -288,7 +288,8 @@ object Dependencies {
   // https://github.com/flyway/flyway
   // ASL 2.0
   val flyway = Seq(
-    "org.flywaydb" % "flyway-core" % FlywayVersion
+    "org.flywaydb" % "flyway-core" % FlywayVersion,
+    "org.flywaydb" % "flyway-mysql" % FlywayVersion
   )
 
   val yamusca = Seq(