Update scalafmt settings

eikek
2021-09-22 17:23:24 +02:00
parent c37f1d7c31
commit 9013f2de5b
277 changed files with 1579 additions and 1615 deletions
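Every hunk below is a pure formatting change: vertical alignment of = and <- across neighbouring lines is dropped, one-sentence scaladoc comments are folded onto a single line, and one long string assignment moves its right-hand side to the next line. A .scalafmt.conf along the following lines would produce such a diff; align.preset and docstrings.oneline are real scalafmt options, but the concrete values and version are assumptions, since the config file itself is not among the hunks shown here.

# sketch of a plausible .scalafmt.conf for this commit (values assumed, not taken from the diff)
version = "3.0.0"          # assumption: some scalafmt 3.x release
align.preset = none        # stop padding =, <- and -> into columns
docstrings.oneline = fold  # fold one-sentence scaladoc onto a single line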

View File

@@ -56,7 +56,7 @@ final class JoexAppImpl[F[_]: Async](
) extends JoexApp[F] {
def init: F[Unit] = {
val run = scheduler.start.compile.drain
val prun = periodicScheduler.start.compile.drain
for {
_ <- scheduleBackgroundTasks
@@ -122,19 +122,19 @@ object JoexAppImpl {
for {
httpClient <- BlazeClientBuilder[F](clientEC).resource
client = JoexClient(httpClient)
store <- Store.create(cfg.jdbc, cfg.files.chunkSize, connectEC)
queue <- JobQueue(store)
pstore <- PeriodicTaskStore.create(store)
nodeOps <- ONode(store)
joex <- OJoex(client, store)
upload <- OUpload(store, queue, joex)
fts <- createFtsClient(cfg)(httpClient)
createIndex <- CreateIndex.resource(fts, store)
itemOps <- OItem(store, fts, createIndex, queue, joex)
itemSearchOps <- OItemSearch(store)
analyser <- TextAnalyser.create[F](cfg.textAnalysis.textAnalysisConfig)
regexNer <- RegexNerFile(cfg.textAnalysis.regexNerFileConfig, store)
updateCheck <- UpdateCheck.resource(httpClient)
javaEmil =
JavaMailEmil(Settings.defaultSettings.copy(debug = cfg.mailDebug))
sch <- SchedulerBuilder(cfg.scheduler, store)
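The JoexAppImpl hunks above are representative of the whole commit: only horizontal whitespace moves. A minimal before/after sketch, with the pre-commit column positions assumed rather than taken from this diff:

// before: short names padded so the arrows line up (assumed spacing)
object AlignDemoBefore {
  def demo: Option[Int] =
    for {
      store       <- Some(1)
      createIndex <- Some(2)
    } yield store + createIndex
}

// after, with alignment disabled: exactly one space before each arrow
object AlignDemoAfter {
  def demo: Option[Int] =
    for {
      store <- Some(1)
      createIndex <- Some(2)
    } yield store + createIndex
}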

View File

@@ -31,7 +31,7 @@ object JoexServer {
def stream[F[_]: Async](cfg: Config, pools: Pools): Stream[F, Nothing] = {
val app = for {
signal <- Resource.eval(SignallingRef[F, Boolean](false))
exitCode <- Resource.eval(Ref[F].of(ExitCode.Success))
joexApp <-
JoexAppImpl
@@ -39,7 +39,7 @@ object JoexServer {
httpApp = Router(
"/api/info" -> InfoRoutes(cfg),
"/api/v1" -> JoexRoutes(joexApp)
).orNotFound
// With Middlewares in place

View File

@@ -22,8 +22,7 @@ import docspell.store.records.RPerson
import io.circe.syntax._
import org.log4s.getLogger
/** Maintains a custom regex-ner file per collective for stanford's regexner annotator. */
trait RegexNerFile[F[_]] {
def makeFile(collective: Ident): F[Option[Path]]
@@ -40,7 +39,7 @@ object RegexNerFile {
store: Store[F]
): Resource[F, RegexNerFile[F]] =
for {
dir <- File.withTempDir[F](cfg.directory, "regexner-")
writer <- Resource.eval(Semaphore(1))
} yield new Impl[F](cfg.copy(directory = dir), store, writer)
@@ -56,7 +55,7 @@ object RegexNerFile {
def doMakeFile(collective: Ident): F[Option[Path]] =
for {
now <- Timestamp.current[F]
existing <- NerFile.find[F](collective, cfg.directory)
result <- existing match {
case Some(nf) =>
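RegexNerFile above shows the commit's other recurring change (CreateItem, SaveProposals, and the scheduler's Task trait below get the same treatment): a scaladoc comment whose closing */ sat on its own line is folded onto one line, consistent with scalafmt's docstring folding and a setting such as docstrings.oneline = fold (an assumption, as noted earlier):

// before
/** Maintains a custom regex-ner file per collective for stanford's regexner annotator.
  */

// after
/** Maintains a custom regex-ner file per collective for stanford's regexner annotator. */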

View File

@@ -48,7 +48,7 @@ object EmptyTrashTask {
s"Starting removing all soft-deleted items older than ${maxDate.asString}"
)
nDeleted <- deleteAll(ctx.args, maxDate, itemOps, itemSearchOps, ctx)
_ <- ctx.logger.info(s"Finished deleting $nDeleted items")
} yield ()
}

View File

@@ -30,7 +30,7 @@ object MigrationTask {
Task(ctx =>
for {
migs <- migrationTasks[F](fts)
res <- Migration[F](cfg, fts, ctx.store, createIndex, ctx.logger).run(migs)
} yield res
)
)
@@ -40,7 +40,7 @@ object MigrationTask {
def job[F[_]: Sync]: F[RJob] =
for {
id <- Ident.randomId[F]
now <- Timestamp.current[F]
} yield RJob.newJob(
id,

View File

@@ -19,7 +19,7 @@ object ReIndexTask {
type Args = ReIndexTaskArgs
val taskName = ReIndexTaskArgs.taskName
val tracker = DocspellSystem.migrationTaskTracker
def apply[F[_]: Async](
cfg: Config.FullTextSearch,

View File

@@ -10,8 +10,7 @@ import cats.data.Kleisli
package object fts {
/** Some work that must be done to advance the schema of the fulltext index. */
type FtsWork[F[_]] = Kleisli[F, FtsContext[F], Unit]
}

View File

@@ -32,7 +32,7 @@ object NotifyDueItemsTask {
def apply[F[_]: Sync](cfg: MailSendConfig, emil: Emil[F]): Task[F, Args, Unit] =
Task { ctx =>
for {
_ <- ctx.logger.info("Getting mail configuration")
mailCfg <- getMailSettings(ctx)
_ <- ctx.logger.info(
s"Searching for items due in ${ctx.args.remindDays} days…."
@@ -42,7 +42,7 @@ object NotifyDueItemsTask {
for {
_ <- ctx.logger.info(s"Sending notification mail to ${ctx.args.recipients}")
res <- emil(mailCfg.toMailConfig).send(mail).map(_.head)
_ <- ctx.logger.info(s"Sent mail with id: $res")
} yield ()
}
.getOrElseF(ctx.logger.info("No items found"))
@@ -72,7 +72,7 @@ object NotifyDueItemsTask {
): OptionT[F, Mail[F]] =
for {
items <- OptionT.liftF(findItems(ctx)).filter(_.nonEmpty)
mail <- OptionT.liftF(makeMail(sendCfg, cfg, ctx.args, items))
} yield mail
def findItems[F[_]: Sync](ctx: Context[F, Args]): F[Vector[ListItem]] =

View File

@@ -64,7 +64,7 @@ object AllPageCountTask {
def job[F[_]: Sync]: F[RJob] =
for {
id <- Ident.randomId[F]
now <- Timestamp.current[F]
} yield RJob.newJob(
id,

View File

@@ -58,7 +58,7 @@ object ConvertAllPdfTask {
def mkJob(ra: RAttachment): F[RJob] =
for {
id <- Ident.randomId[F]
now <- Timestamp.current[F]
} yield RJob.newJob(
id,

View File

@@ -39,9 +39,9 @@ object PdfConvTask {
def apply[F[_]: Async](cfg: Config): Task[F, Args, Unit] =
Task { ctx =>
for {
_ <- ctx.logger.info(s"Converting pdf file ${ctx.args} using ocrmypdf")
meta <- checkInputs(cfg, ctx)
_ <- meta.traverse(fm => convert(cfg, ctx, fm))
} yield ()
}
@@ -92,7 +92,7 @@ object PdfConvTask {
ctx: Context[F, Args],
in: RFileMeta
): F[Unit] = {
val fs = ctx.store.fileStore
val data = fs.getBytes(in.id)
val storeResult: ConversionResult.Handler[F, Unit] =
@@ -125,7 +125,7 @@ object PdfConvTask {
for {
lang <- getLanguage(ctx)
_ <- ocrMyPdf(lang)
} yield ()
}

View File

@@ -25,7 +25,7 @@ object MakePreviewTask {
def apply[F[_]: Sync](pcfg: PreviewConfig): Task[F, Args, Unit] =
Task { ctx =>
for {
exists <- previewExists(ctx)
preview <- PdfboxPreview(pcfg)
_ <-
if (exists)

View File

@@ -80,7 +80,7 @@ object ConvertPdf {
Conversion.create[F](cfg, sanitizeHtml, ctx.logger).use { conv =>
mime match {
case mt =>
val data = ctx.store.fileStore.getBytes(ra.fileId)
val handler = conversionHandler[F](ctx, cfg, ra, item)
ctx.logger.info(s"Converting file ${ra.name} (${mime.asString}) into a PDF") *>
conv.toPDF(DataType(mt), ctx.args.meta.language, handler)(

View File

@@ -17,8 +17,7 @@ import docspell.joex.scheduler.{Context, Task}
import docspell.store.queries.QItem
import docspell.store.records._
/** Task that creates the item. */
object CreateItem {
def apply[F[_]: Sync]: Task[F, ProcessItemArgs, ItemData] =
@@ -96,14 +95,14 @@ object CreateItem {
for {
time <- Duration.stopTime[F]
it <- loadItemOrInsertNew
_ <- if (it._1 != 1) storeItemError[F](ctx) else ().pure[F]
now <- Timestamp.current[F]
fm <- fileMetas(it._2.id, now)
k <- fm.traverse(insertAttachment(ctx))
_ <- logDifferences(ctx, fm, k.sum)
dur <- time
_ <- ctx.logger.info(s"Creating item finished in ${dur.formatExact}")
} yield ItemData(
it._2,
fm,
@@ -127,7 +126,7 @@ object CreateItem {
private def findExisting[F[_]: Sync]: Task[F, ProcessItemArgs, Option[ItemData]] =
Task { ctx =>
val states = ItemState.invalidStates
val fileMetaIds = ctx.args.files.map(_.fileMetaId).toSet
for {
cand <- ctx.store.transact(QItem.findByFileIds(fileMetaIds.toSeq, states))

View File

@@ -24,9 +24,9 @@ object CrossCheckProposals {
def apply[F[_]: Sync](data: ItemData): Task[F, ProcessItemArgs, ItemData] =
Task { ctx =>
val proposals = data.finalProposals
val corrOrg = proposals.find(MetaProposalType.CorrOrg)
(for {
orgRef <- OptionT.fromOption[F](corrOrg)
persRefs <- OptionT.liftF(EvalProposals.findOrganizationRelation(data, ctx))
clProps <- OptionT.liftF(
personOrgCheck[F](ctx.logger, data.classifyProposals, persRefs)(orgRef)

View File

@@ -36,7 +36,7 @@ object DuplicateCheck {
def removeDuplicates[F[_]: Sync](ctx: Context[F, Args]): F[ProcessItemArgs] =
for {
fileMetas <- findDuplicates(ctx)
_ <- fileMetas.traverse(deleteDuplicate(ctx))
ids = fileMetas.filter(_.exists).map(_.fm.id).toSet
} yield ctx.args.copy(files =
ctx.args.files.filterNot(f => ids.contains(f.fileMetaId))
@@ -61,7 +61,7 @@ object DuplicateCheck {
): F[Vector[FileMetaDupes]] =
ctx.store.transact(for {
fileMetas <- RFileMeta.findByIds(ctx.args.files.map(_.fileMetaId))
dupes <- fileMetas.traverse(checkDuplicate(ctx))
} yield dupes)
private def checkDuplicate[F[_]](

View File

@@ -22,7 +22,7 @@ object EvalProposals {
def apply[F[_]: Sync](data: ItemData): Task[F, ProcessItemArgs, ItemData] =
Task { ctx =>
for {
now <- Timestamp.current[F]
personRefs <- findOrganizationRelation[F](data, ctx)
metas = data.metas.map(calcCandidateWeight(now.toUtcDate, personRefs))
} yield data.copy(metas = metas)
@@ -71,10 +71,10 @@ object EvalProposals {
}
.getOrElse(2000.0)
case _ =>
val textLen = rm.content.map(_.length).getOrElse(0)
val tagCount = cand.origin.size.toDouble
val pos = cand.origin.map(_.startPosition).min
val words = cand.origin.map(_.label.split(' ').length).max.toDouble
val nerFac =
cand.origin.map(label => nerTagFactor(label.tag, mp.proposalType)).min
val corrPerFac = corrOrgPersonFactor(rm, mp, personRefs, cand)

View File

@@ -136,7 +136,7 @@ object ExtractArchive {
archive: Option[RAttachmentArchive]
)(ra: RAttachment, pos: Int): F[Extracted] = {
val zipData = ctx.store.fileStore.getBytes(ra.fileId)
val glob = ctx.args.meta.fileFilter.getOrElse(Glob.all)
ctx.logger.debug(s"Filtering zip entries with '${glob.asString}'") *>
zipData
.through(Zip.unzipP[F](8192, glob))
@@ -153,7 +153,7 @@
)(ra: RAttachment, pos: Int): F[Extracted] = {
val email: Stream[F, Byte] = ctx.store.fileStore.getBytes(ra.fileId)
val glob = ctx.args.meta.fileFilter.getOrElse(Glob.all)
val attachOnly = ctx.args.meta.attachmentsOnly.getOrElse(false)
ctx.logger.debug(s"Filtering email attachments with '${glob.asString}'") *>
email
@@ -193,7 +193,7 @@
): Stream[F, Extracted] = {
val (entry, subPos) = tentry
val mimeHint = MimeTypeHint.filename(entry.name).withAdvertised(entry.mime.asString)
val fileId = entry.data.through(ctx.store.fileStore.save(mimeHint))
Stream.eval(ctx.logger.debug(s"Extracted ${entry.name}. Storing as attachment.")) >>
fileId.evalMap { fid =>

View File

@@ -140,7 +140,7 @@ object FindProposal {
val latestFirst = dates
.filter(_.date.isBefore(maxFuture.toUtcDate))
.sortWith((l1, l2) => l1.date.isAfter(l2.date))
val nowDate = now.value.atZone(ZoneId.of("GMT")).toLocalDate
val (after, before) = latestFirst.span(ndl => ndl.date.isAfter(nowDate))
val dueDates = MetaProposalList.fromSeq1(

View File

@@ -146,12 +146,12 @@ object LinkProposal {
}
object Result {
case class NoneFound(proposalType: MetaProposalType) extends Result
case class SingleResult(proposalType: MetaProposalType) extends Result
case class MultipleResult(proposalType: MetaProposalType) extends Result
def noneFound(proposalType: MetaProposalType): Result = NoneFound(proposalType)
def single(proposalType: MetaProposalType): Result = SingleResult(proposalType)
def multiple(proposalType: MetaProposalType): Result = MultipleResult(proposalType)
}
}

View File

@@ -55,7 +55,7 @@ object ReProcessItem {
def loadItem[F[_]: Sync]: Task[F, Args, ItemData] =
Task { ctx =>
(for {
item <- OptionT(ctx.store.transact(RItem.findById(ctx.args.itemId)))
attach <- OptionT.liftF(ctx.store.transact(RAttachment.findByItem(item.id)))
asrc <-
OptionT.liftF(ctx.store.transact(RAttachmentSource.findByItem(ctx.args.itemId)))
@@ -107,9 +107,9 @@ object ReProcessItem {
data.item.cid,
args.itemId.some,
lang,
None, //direction
data.item.source, //source-id
None, //folder
Seq.empty,
false,
None,

View File

@@ -14,8 +14,7 @@ import docspell.joex.scheduler.{Context, Task}
import docspell.store.AddResult
import docspell.store.records._
/** Saves the proposals in the database */
object SaveProposals {
type Args = ProcessItemArgs

View File

@@ -38,8 +38,8 @@ object SetGivenData {
ops: OItem[F]
): Task[F, Args, ItemData] =
Task { ctx =>
val itemId = data.item.id
val folderId = ctx.args.meta.folderId
val collective = ctx.args.meta.collective
for {
_ <- ctx.logger.info("Starting setting given data")
@@ -62,7 +62,7 @@ object SetGivenData {
ops: OItem[F]
): Task[F, Args, ItemData] =
Task { ctx =>
val itemId = data.item.id
val collective = ctx.args.meta.collective
val tags =
(ctx.args.meta.tags.getOrElse(Nil) ++ data.tags ++ data.classifyTags).distinct

View File

@@ -98,7 +98,7 @@ object TextAnalysis {
names <- ctx.store.transact(
ClassifierName.findTagClassifiers(ctx.args.meta.collective)
)
_ <- ctx.logger.debug(s"Guessing tags for ${names.size} categories")
tags <- names.traverse(classifyWith)
} yield tags.flatten
}

View File

@@ -47,7 +47,7 @@ object TextExtraction {
item.item.name.some,
None
)
_ <- fts.indexData(ctx.logger, (idxItem +: txt.map(_.td)).toSeq: _*)
dur <- start
extractedTags = txt.flatMap(_.tags).distinct.toList
_ <- ctx.logger.info(s"Text extraction finished in ${dur.formatExact}.")
@@ -104,9 +104,9 @@ object TextExtraction {
)(ra: RAttachment): F[(RAttachmentMeta, List[String])] =
for {
_ <- ctx.logger.debug(s"Extracting text for attachment ${stripAttachmentName(ra)}")
dst <- Duration.stopTime[F]
fids <- filesToExtract(ctx)(item, ra)
res <- extractTextFallback(ctx, cfg, ra, lang)(fids)
meta = item.changeMeta(
ra.id,
lang,

View File

@@ -26,8 +26,8 @@ object JoexRoutes {
HttpRoutes.of[F] {
case POST -> Root / "notify" =>
for {
_ <- app.scheduler.notifyChange
_ <- app.periodicScheduler.notifyChange
resp <- Ok(BasicResult(true, "Schedulers notified."))
} yield resp

View File

@@ -40,10 +40,10 @@ object ScanMailboxTask {
_ <- ctx.logger.info(
s"=== Start importing mails for user ${ctx.args.account.user.id}"
)
_ <- ctx.logger.debug(s"Settings: ${ctx.args.asJson.noSpaces}")
mailCfg <- getMailSettings(ctx)
folders = ctx.args.folders.mkString(", ")
userId = ctx.args.account.user
imapConn = ctx.args.imapConnection
_ <- ctx.logger.info(
s"Reading mails for user ${userId.id} from ${imapConn.id}/$folders"
@@ -76,8 +76,8 @@ object ScanMailboxTask {
joex: OJoex[F],
ctx: Context[F, Args]
): F[Unit] = {
val mailer = theEmil(mailCfg.toMailConfig)
val impl = new Impl[F](cfg, ctx)
val inFolders = ctx.args.folders.take(cfg.maxFolders)
val getInitialInput =
@@ -150,7 +150,7 @@ object ScanMailboxTask {
name: String
): MailOp[F, C, ScanResult] =
for {
_ <- Kleisli.liftF(ctx.logger.info(s"Processing folder $name"))
folder <- requireFolder(a)(name)
search <- searchMails(a)(folder)
items = search.mails.map(MailHeaderItem(_))
@@ -290,7 +290,7 @@ object ScanMailboxTask {
mail.toByteStream
)
for {
_ <- ctx.logger.debug(s"Submitting mail '${mail.header.subject}'")
dir <- getDirection(mail.header)
meta = OUpload.UploadMeta(
Some(dir),
@@ -340,7 +340,7 @@ object ScanMailboxTask {
}
case class MailHeaderItem(mh: MailHeader, process: Boolean = true) {
def skip = copy(process = false)
def notProcess = !process
}
}

View File

@@ -61,9 +61,9 @@ object Context {
store: Store[F]
): F[Context[F, A]] =
for {
_ <- log.ftrace("Creating logger for task run")
logger <- QueueLogger(job.id, job.info, config.logBufferSize, logSink)
_ <- log.ftrace("Logger created, instantiating context")
ctx = create[F, A](job.id, arg, config, logger, store)
} yield ctx

View File

@@ -44,7 +44,7 @@ object PeriodicScheduler {
): Resource[F, PeriodicScheduler[F]] =
for {
waiter <- Resource.eval(SignallingRef(true))
state <- Resource.eval(SignallingRef(PeriodicSchedulerImpl.emptyState[F]))
psch = new PeriodicSchedulerImpl[F](
cfg,
sch,

View File

@@ -62,9 +62,9 @@ final class PeriodicSchedulerImpl[F[_]: Async](
def mainLoop: Stream[F, Nothing] = {
val body: F[Boolean] =
for {
_ <- logger.fdebug(s"Going into main loop")
now <- Timestamp.current[F]
_ <- logger.fdebug(s"Looking for next periodic task")
go <- logThrow("Error getting next task")(
store
.takeNext(config.name, None)

View File

@@ -45,10 +45,10 @@ case class SchedulerBuilder[F[_]: Async](
def resource: Resource[F, Scheduler[F]] = {
val scheduler: Resource[F, SchedulerImpl[F]] = for {
jq <- queue
waiter <- Resource.eval(SignallingRef(true))
state <- Resource.eval(SignallingRef(SchedulerImpl.emptyState[F]))
perms <- Resource.eval(Semaphore(config.poolSize.toLong))
} yield new SchedulerImpl[F](
config,
jq,

View File

@@ -109,8 +109,8 @@ final class SchedulerImpl[F[_]: Async](
_ <- permits.available.flatMap(a =>
logger.fdebug(s"Try to acquire permit ($a free)")
)
_ <- permits.acquire
_ <- logger.fdebug("New permit acquired")
down <- state.get.map(_.shutdownRequest)
rjob <-
if (down)
@@ -159,11 +159,11 @@ final class SchedulerImpl[F[_]: Async](
_ <-
logger.fdebug(s"Creating context for job ${job.info} to run cancellation $t")
ctx <- Context[F, String](job, job.args, config, logSink, store)
_ <- t.onCancel.run(ctx)
_ <- state.modify(_.markCancelled(job))
_ <- onFinish(job, JobState.Cancelled)
_ <- ctx.logger.warn("Job has been cancelled.")
_ <- logger.fdebug(s"Job ${job.info} has been cancelled.")
} yield ()
}
}
@@ -181,11 +181,11 @@ final class SchedulerImpl[F[_]: Async](
logger.ferror(s"Unable to start a task for job ${job.info}: $err")
case Right(t) =>
for {
_ <- logger.fdebug(s"Creating context for job ${job.info} to run $t")
ctx <- Context[F, String](job, job.args, config, logSink, store)
jot = wrapTask(job, t.task, ctx)
tok <- forkRun(job, jot.run(ctx), t.onCancel.run(ctx), ctx)
_ <- state.modify(_.addRunning(job, tok))
} yield ()
}
}

View File

@@ -13,8 +13,7 @@ import cats.implicits._
import docspell.common.Logger
/** The code that is executed by the scheduler */
trait Task[F[_], A, B] {
def run(ctx: Context[F, A]): F[B]

View File

@@ -29,8 +29,8 @@ object MakeMail {
): Mail[F] = {
val templateCtx = TemplateCtx(latestRelease, thisVersion)
val md = templateCtx.render(cfg.body)
val subj = templateCtx.render(cfg.subject)
MailBuilder.build(
From(smtpCfg.mailFrom),

View File

@@ -58,9 +58,9 @@ object UpdateCheckTask {
s"Get SMTP connection for ${cfg.senderAccount.asString} and ${cfg.smtpId}"
)
smtpCfg <- findConnection(ctx, cfg)
_ <- ctx.logger.debug("Checking for latest release at GitHub")
latest <- updateCheck.latestRelease
_ <- ctx.logger.debug(s"Got latest release: $latest.")
_ <-
if (cfg.testRun)
ctx.logger.info(

View File

@@ -16,12 +16,12 @@ class NerFileTest extends FunSuite {
test("create valid case insensitive patterns") {
val names = List(
"Some company AG" -> "(?i)some company ag",
"Acme GmbH" -> "(?i)acme gmbh",
"UP" -> "(?i)up",
"1 & 1" -> "(?i)1 & 1",
"1 & 1 (Telefon / Internet)" -> "(?i)1 & 1 \\(telefon / internet\\)",
"X-corp (this)*-*[one]" -> "(?i)x\\-corp \\(this\\)\\*\\-\\*\\[one\\]"
)
for ((name, first) <- names) {

View File

@@ -13,8 +13,8 @@ import munit._
class CountingSchemeSpec extends FunSuite {
test("counting") {
val cs = CountingScheme(2, 1)
val list = List.iterate(cs.nextPriority, 6)(_._1.nextPriority).map(_._2)
val expect = List(Priority.High, Priority.High, Priority.Low)
assertEquals(list, expect ++ expect)
}

View File

@@ -51,53 +51,54 @@ class UpdateCheckTest extends FunSuite {
object UpdateCheckTest {
val exampleResponsePartial =
"""
|{
| "url": "https://api.github.com/repos/eikek/docspell/releases/99899888",
| "assets_url": "https://api.github.com/repos/eikek/docspell/releases/99899888/assets",
| "upload_url": "https://uploads.github.com/repos/eikek/docspell/releases/99899888/assets{?name,label}",
| "html_url": "https://github.com/eikek/docspell/releases/tag/v0.26.0",
| "id": 99899888,
| "node_id": "MDc6UmVsZWFzZTQ4NjEwNTY2",
| "tag_name": "v0.26.0",
| "target_commitish": "master",
| "name": "Docspell 0.26.0",
| "draft": false,
| "prerelease": false,
| "created_at": "2021-08-28T10:02:01Z",
| "published_at": "2021-08-28T10:30:38Z",
| "assets": [
| {
| "url": "https://api.github.com/repos/eikek/docspell/releases/assets/43494218",
| "id": 43494218,
| "node_id": "MDEyOlJlbGVhc2VBc3NldDQzNDk0MjE4",
| "name": "docspell-joex-0.26.0.zip",
| "label": "",
| "content_type": "application/zip",
| "state": "uploaded",
| "size": 328163415,
| "download_count": 24,
| "created_at": "2021-08-28T10:16:24Z",
| "updated_at": "2021-08-28T10:16:36Z",
| "browser_download_url": "https://github.com/eikek/docspell/releases/download/v0.26.0/docspell-joex-0.26.0.zip"
| },
| {
| "url": "https://api.github.com/repos/eikek/docspell/releases/assets/43494232",
| "id": 43494232,
| "node_id": "MDEyOlJlbGVhc2VBc3NldDQzNDk0MjMy",
| "name": "docspell-joex_0.26.0_all.deb",
| "label": "",
| "content_type": "application/vnd.debian.binary-package",
| "state": "uploaded",
| "size": 337991872,
| "download_count": 8,
| "created_at": "2021-08-28T10:16:37Z",
| "updated_at": "2021-08-28T10:16:53Z",
| "browser_download_url": "https://github.com/eikek/docspell/releases/download/v0.26.0/docspell-joex_0.26.0_all.deb"
| }
| ],
| "tarball_url": "https://api.github.com/repos/eikek/docspell/tarball/v0.26.0",
| "zipball_url": "https://api.github.com/repos/eikek/docspell/zipball/v0.26.0"
|}
|""".stripMargin
}