Refactor scheduler into api / impl
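The refactor replaces direct use of the persistent job queue (JobQueue, RJob, RPeriodicTask) with a small scheduler api: call sites build typed Job[A] values and submit them through a JobStore, while periodic work is registered through UserTaskStore. A rough, self-contained sketch of that surface, inferred from the call sites in the diff below — the field list and signatures are assumptions, not the actual sources (the diff also shows a Job.createNew constructor that supplies the generated id and timestamp):

import io.circe.Encoder
import io.circe.syntax._

// Sketch only: a typed job descriptor plus the store it is submitted to.
// Names come from the diff; everything else is inferred.
final case class Job[A](
    task: String,            // task name, e.g. "full-text-index"
    group: String,           // submitting collective or system group
    args: A,                 // typed task arguments
    subject: String,         // human-readable subject line
    tracker: Option[String]  // dedupe key for "insert if new" submissions
) {
  // Encode the typed arguments to JSON for storage (Job[String]).
  def encode(implicit e: Encoder[A]): Job[String] =
    copy(args = args.asJson.noSpaces)
}

trait JobStore[F[_]] {
  def insertIfNew(job: Job[String]): F[Boolean]
  def insertAllIfNew(jobs: Vector[Job[String]]): F[Int]
}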
@@ -31,16 +31,11 @@ import docspell.joex.process.ItemHandler
 import docspell.joex.process.ReProcessItem
 import docspell.joex.scanmailbox._
 import docspell.scheduler._
-import docspell.scheduler.impl.{
-  PeriodicSchedulerBuilder,
-  PeriodicTaskStore,
-  SchedulerBuilder
-}
+import docspell.scheduler.impl.{JobStoreModuleBuilder, SchedulerModuleBuilder}
 import docspell.joex.updatecheck._
 import docspell.notification.api.NotificationModule
 import docspell.notification.impl.NotificationModuleImpl
 import docspell.pubsub.api.{PubSub, PubSubT}
-import docspell.scheduler.msg.JobQueuePublish
 import docspell.scheduler.usertask.{UserTaskScope, UserTaskStore}
 import docspell.store.Store
 import docspell.store.records.{REmptyTrashSetting, RJobLog}
@@ -50,8 +45,8 @@ import org.http4s.client.Client
 final class JoexAppImpl[F[_]: Async](
     cfg: Config,
     store: Store[F],
-    queue: JobQueue[F],
-    pstore: PeriodicTaskStore[F],
+    uts: UserTaskStore[F],
+    jobStore: JobStore[F],
     termSignal: SignallingRef[F, Boolean],
     notificationMod: NotificationModule[F],
    val scheduler: Scheduler[F],
@@ -82,32 +77,30 @@ final class JoexAppImpl[F[_]: Async](
   private def scheduleBackgroundTasks: F[Unit] =
     HouseKeepingTask
       .periodicTask[F](cfg.houseKeeping.schedule)
-      .flatMap(pstore.insert) *>
+      .flatMap(t => uts.updateTask(UserTaskScope.system, t.summary, t)) *>
       scheduleEmptyTrashTasks *>
       UpdateCheckTask
        .periodicTask(cfg.updateCheck)
-        .flatMap(pstore.insert) *>
-      MigrationTask.job.flatMap(queue.insertIfNew) *>
+        .flatMap(t => uts.updateTask(UserTaskScope.system, t.summary, t)) *>
+      MigrationTask.job.flatMap(jobStore.insertIfNew) *>
       AllPreviewsTask
         .job(MakePreviewArgs.StoreMode.WhenMissing, None)
-        .flatMap(queue.insertIfNew) *>
-      AllPageCountTask.job.flatMap(queue.insertIfNew).as(())
+        .flatMap(jobStore.insertIfNew) *>
+      AllPageCountTask.job.flatMap(jobStore.insertIfNew).void

   private def scheduleEmptyTrashTasks: F[Unit] =
     store
       .transact(
         REmptyTrashSetting.findForAllCollectives(OCollective.EmptyTrash.default, 50)
       )
-      .evalMap(es =>
-        UserTaskStore(store).use { uts =>
-          val args = EmptyTrashArgs(es.cid, es.minAge)
-          uts.updateOneTask(
-            UserTaskScope(args.collective),
-            args.makeSubject.some,
-            EmptyTrashTask.userTask(args, es.schedule)
-          )
-        }
-      )
+      .evalMap { es =>
+        val args = EmptyTrashArgs(es.cid, es.minAge)
+        uts.updateOneTask(
+          UserTaskScope(args.collective),
+          args.makeSubject.some,
+          EmptyTrashTask.userTask(args, es.schedule)
+        )
+      }
       .compile
       .drain

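Reviewer note: two idempotency mechanisms meet in scheduleBackgroundTasks above. Periodic tasks are now upserted through uts.updateTask, keyed by the task's fixed id, while one-off bootstrap jobs still go through jobStore.insertIfNew, which appears to dedupe on the job's tracker id (e.g. DocspellSystem.migrationTaskTracker). Both keep the method safe to run on every joex startup. Condensed from the hunk:

// Periodic user-task: upsert, keyed by the task's fixed periodic id.
HouseKeepingTask
  .periodicTask[F](cfg.houseKeeping.schedule)
  .flatMap(t => uts.updateTask(UserTaskScope.system, t.summary, t)) *>
  // One-off job: inserted only if no open job with the same tracker exists.
  MigrationTask.job.flatMap(jobStore.insertIfNew).void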
@@ -123,186 +116,186 @@ object JoexAppImpl extends MailAddressCodec {
       pubSub: PubSub[F]
   ): Resource[F, JoexApp[F]] =
     for {
-      pstore <- PeriodicTaskStore.create(store)
-      joexLogger = docspell.logging.getLogger[F](s"joex-${cfg.appId.id}")
+      joexLogger <- Resource.pure(docspell.logging.getLogger[F](s"joex-${cfg.appId.id}"))
       pubSubT = PubSubT(pubSub, joexLogger)
       javaEmil =
         JavaMailEmil(Settings.defaultSettings.copy(debug = cfg.mailDebug))
       notificationMod <- Resource.eval(
         NotificationModuleImpl[F](store, javaEmil, httpClient, 200)
       )
-      queue <- JobQueuePublish(store, pubSubT, notificationMod)
+
+      jobStoreModule = JobStoreModuleBuilder(store)
+        .withPubsub(pubSubT)
+        .withEventSink(notificationMod)
+        .build
+
       joex <- OJoex(pubSubT)
-      upload <- OUpload(store, queue, joex)
+      upload <- OUpload(store, jobStoreModule.jobs, joex)
       fts <- createFtsClient(cfg)(httpClient)
       createIndex <- CreateIndex.resource(fts, store)
-      itemOps <- OItem(store, fts, createIndex, queue, joex)
+      itemOps <- OItem(store, fts, createIndex, jobStoreModule.jobs, joex)
       itemSearchOps <- OItemSearch(store)
       analyser <- TextAnalyser.create[F](cfg.textAnalysis.textAnalysisConfig)
       regexNer <- RegexNerFile(cfg.textAnalysis.regexNerFileConfig, store)
       updateCheck <- UpdateCheck.resource(httpClient)
       notification <- ONotification(store, notificationMod)
-      fileRepo <- OFileRepository(store, queue, joex)
-      sch <- SchedulerBuilder(cfg.scheduler, store)
-        .withQueue(queue)
-        .withPubSub(pubSubT)
-        .withEventSink(notificationMod)
-        .withTask(
-          JobTask.json(
-            ProcessItemArgs.taskName,
-            ItemHandler.newItem[F](cfg, itemOps, fts, analyser, regexNer),
-            ItemHandler.onCancel[F]
-          )
-        )
+      fileRepo <- OFileRepository(store, jobStoreModule.jobs, joex)
+
+      schedulerModule <- SchedulerModuleBuilder(jobStoreModule)
+        .withSchedulerConfig(cfg.scheduler)
+        .withPeriodicSchedulerConfig(cfg.periodicScheduler)
+        .withTaskRegistry(JobTaskRegistry
+          .empty[F]
+          .withTask(
+            JobTask.json(
+              ProcessItemArgs.taskName,
+              ItemHandler.newItem[F](cfg, itemOps, fts, analyser, regexNer),
+              ItemHandler.onCancel[F]
+            )
+          )
-        .withTask(
-          JobTask.json(
-            ReProcessItemArgs.taskName,
-            ReProcessItem[F](cfg, fts, itemOps, analyser, regexNer),
-            ReProcessItem.onCancel[F]
-          )
-        )
+          .withTask(
+            JobTask.json(
+              ReProcessItemArgs.taskName,
+              ReProcessItem[F](cfg, fts, itemOps, analyser, regexNer),
+              ReProcessItem.onCancel[F]
+            )
+          )
-        .withTask(
-          JobTask.json(
-            ScanMailboxArgs.taskName,
-            ScanMailboxTask[F](cfg.userTasks.scanMailbox, javaEmil, upload, joex),
-            ScanMailboxTask.onCancel[F]
-          )
-        )
+          .withTask(
+            JobTask.json(
+              ScanMailboxArgs.taskName,
+              ScanMailboxTask[F](cfg.userTasks.scanMailbox, javaEmil, upload, joex),
+              ScanMailboxTask.onCancel[F]
+            )
+          )
-        .withTask(
-          JobTask.json(
-            MigrationTask.taskName,
-            MigrationTask[F](cfg.fullTextSearch, fts, createIndex),
-            MigrationTask.onCancel[F]
-          )
-        )
+          .withTask(
+            JobTask.json(
+              MigrationTask.taskName,
+              MigrationTask[F](cfg.fullTextSearch, fts, createIndex),
+              MigrationTask.onCancel[F]
+            )
+          )
-        .withTask(
-          JobTask.json(
-            ReIndexTask.taskName,
-            ReIndexTask[F](cfg.fullTextSearch, fts, createIndex),
-            ReIndexTask.onCancel[F]
-          )
-        )
+          .withTask(
+            JobTask.json(
+              ReIndexTask.taskName,
+              ReIndexTask[F](cfg.fullTextSearch, fts, createIndex),
+              ReIndexTask.onCancel[F]
+            )
+          )
-        .withTask(
-          JobTask.json(
-            HouseKeepingTask.taskName,
-            HouseKeepingTask[F](cfg, fileRepo),
-            HouseKeepingTask.onCancel[F]
-          )
-        )
+          .withTask(
+            JobTask.json(
+              HouseKeepingTask.taskName,
+              HouseKeepingTask[F](cfg, fileRepo),
+              HouseKeepingTask.onCancel[F]
+            )
+          )
-        .withTask(
-          JobTask.json(
-            PdfConvTask.taskName,
-            PdfConvTask[F](cfg),
-            PdfConvTask.onCancel[F]
-          )
-        )
+          .withTask(
+            JobTask.json(
+              PdfConvTask.taskName,
+              PdfConvTask[F](cfg),
+              PdfConvTask.onCancel[F]
+            )
+          )
-        .withTask(
-          JobTask.json(
-            ConvertAllPdfArgs.taskName,
-            ConvertAllPdfTask[F](queue, joex),
-            ConvertAllPdfTask.onCancel[F]
-          )
-        )
+          .withTask(
+            JobTask.json(
+              ConvertAllPdfArgs.taskName,
+              ConvertAllPdfTask[F](jobStoreModule.jobs, joex),
+              ConvertAllPdfTask.onCancel[F]
+            )
+          )
-        .withTask(
-          JobTask.json(
-            LearnClassifierArgs.taskName,
-            LearnClassifierTask[F](cfg.textAnalysis, analyser),
-            LearnClassifierTask.onCancel[F]
-          )
-        )
+          .withTask(
+            JobTask.json(
+              LearnClassifierArgs.taskName,
+              LearnClassifierTask[F](cfg.textAnalysis, analyser),
+              LearnClassifierTask.onCancel[F]
+            )
+          )
-        .withTask(
-          JobTask.json(
-            MakePreviewArgs.taskName,
-            MakePreviewTask[F](cfg.extraction.preview),
-            MakePreviewTask.onCancel[F]
-          )
-        )
+          .withTask(
+            JobTask.json(
+              MakePreviewArgs.taskName,
+              MakePreviewTask[F](cfg.extraction.preview),
+              MakePreviewTask.onCancel[F]
+            )
+          )
-        .withTask(
-          JobTask.json(
-            AllPreviewsArgs.taskName,
-            AllPreviewsTask[F](queue, joex),
-            AllPreviewsTask.onCancel[F]
-          )
-        )
+          .withTask(
+            JobTask.json(
+              AllPreviewsArgs.taskName,
+              AllPreviewsTask[F](jobStoreModule.jobs, joex),
+              AllPreviewsTask.onCancel[F]
+            )
+          )
-        .withTask(
-          JobTask.json(
-            MakePageCountArgs.taskName,
-            MakePageCountTask[F](),
-            MakePageCountTask.onCancel[F]
-          )
-        )
+          .withTask(
+            JobTask.json(
+              MakePageCountArgs.taskName,
+              MakePageCountTask[F](),
+              MakePageCountTask.onCancel[F]
+            )
+          )
-        .withTask(
-          JobTask.json(
-            AllPageCountTask.taskName,
-            AllPageCountTask[F](queue, joex),
-            AllPageCountTask.onCancel[F]
-          )
-        )
+          .withTask(
+            JobTask.json(
+              AllPageCountTask.taskName,
+              AllPageCountTask[F](jobStoreModule.jobs, joex),
+              AllPageCountTask.onCancel[F]
+            )
+          )
-        .withTask(
-          JobTask.json(
-            EmptyTrashArgs.taskName,
-            EmptyTrashTask[F](itemOps, itemSearchOps),
-            EmptyTrashTask.onCancel[F]
-          )
-        )
+          .withTask(
+            JobTask.json(
+              EmptyTrashArgs.taskName,
+              EmptyTrashTask[F](itemOps, itemSearchOps),
+              EmptyTrashTask.onCancel[F]
+            )
+          )
-        .withTask(
-          JobTask.json(
-            UpdateCheckTask.taskName,
-            UpdateCheckTask[F](
-              cfg.updateCheck,
-              cfg.sendMail,
-              javaEmil,
-              updateCheck,
-              ThisVersion.default
-            ),
-            UpdateCheckTask.onCancel[F]
-          )
-        )
+          .withTask(
+            JobTask.json(
+              UpdateCheckTask.taskName,
+              UpdateCheckTask[F](
+                cfg.updateCheck,
+                cfg.sendMail,
+                javaEmil,
+                updateCheck,
+                ThisVersion.default
+              ),
+              UpdateCheckTask.onCancel[F]
+            )
+          )
-        .withTask(
-          JobTask.json(
-            PeriodicQueryTask.taskName,
-            PeriodicQueryTask[F](notification),
-            PeriodicQueryTask.onCancel[F]
-          )
-        )
+          .withTask(
+            JobTask.json(
+              PeriodicQueryTask.taskName,
+              PeriodicQueryTask[F](notification),
+              PeriodicQueryTask.onCancel[F]
+            )
+          )
-        .withTask(
-          JobTask.json(
-            PeriodicDueItemsTask.taskName,
-            PeriodicDueItemsTask[F](notification),
-            PeriodicDueItemsTask.onCancel[F]
-          )
-        )
+          .withTask(
+            JobTask.json(
+              PeriodicDueItemsTask.taskName,
+              PeriodicDueItemsTask[F](notification),
+              PeriodicDueItemsTask.onCancel[F]
+            )
+          )
-        .withTask(
-          JobTask.json(
-            FileCopyTaskArgs.taskName,
-            FileCopyTask[F](cfg),
-            FileCopyTask.onCancel[F]
-          )
-        )
+          .withTask(
+            JobTask.json(
+              FileCopyTaskArgs.taskName,
+              FileCopyTask[F](cfg),
+              FileCopyTask.onCancel[F]
+            )
+          )
-        .withTask(
-          JobTask.json(
-            FileIntegrityCheckArgs.taskName,
-            FileIntegrityCheckTask[F](fileRepo),
-            FileIntegrityCheckTask.onCancel[F]
-          )
-        )
+          .withTask(
+            JobTask.json(
+              FileIntegrityCheckArgs.taskName,
+              FileIntegrityCheckTask[F](fileRepo),
+              FileIntegrityCheckTask.onCancel[F]
+            )
+          )
+        )
         .resource
-      psch <- PeriodicSchedulerBuilder.build(
-        cfg.periodicScheduler,
-        sch,
-        queue,
-        pstore,
-        pubSubT
-      )
       app = new JoexAppImpl(
         cfg,
         store,
-        queue,
-        pstore,
+        jobStoreModule.userTasks,
+        jobStoreModule.jobs,
         termSignal,
         notificationMod,
-        sch,
-        psch
+        schedulerModule.scheduler,
+        schedulerModule.periodicScheduler
       )
       appR <- Resource.make(app.init.map(_ => app))(_.initShutdown)
     } yield appR
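For readers new to the scheduler module: JobTaskRegistry collects JobTask.json(...) entries, each pairing a task name with a handler and a JSON decoder for the task's arguments, so the scheduler can dispatch persisted jobs by name. A self-contained toy version of the pattern (illustration only, not the docspell sources):

import io.circe.{Decoder, parser}

// A named task whose handler receives its arguments as raw JSON.
final case class JsonTask[F[_]](name: String, run: String => Either[String, F[Unit]])

object JsonTask {
  // Pair a task name with a typed handler; decoding happens at dispatch time.
  def json[F[_], A: Decoder](name: String)(handler: A => F[Unit]): JsonTask[F] =
    JsonTask(name, raw => parser.decode[A](raw).left.map(_.getMessage).map(handler))
}

// The registry is just a name-indexed map the scheduler dispatches from.
final case class TaskRegistry[F[_]](tasks: Map[String, JsonTask[F]]) {
  def withTask(t: JsonTask[F]): TaskRegistry[F] = TaskRegistry(tasks + (t.name -> t))
  def find(name: String): Option[JsonTask[F]] = tasks.get(name)
}

object TaskRegistry {
  def empty[F[_]]: TaskRegistry[F] = TaskRegistry(Map.empty)
}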
@@ -312,4 +305,5 @@ object JoexAppImpl extends MailAddressCodec {
   )(client: Client[F]): Resource[F, FtsClient[F]] =
     if (cfg.fullTextSearch.enabled) SolrFtsClient(cfg.fullTextSearch.solr, client)
     else Resource.pure[F, FtsClient[F]](FtsClient.none[F])
+
 }

@@ -8,13 +8,11 @@ package docspell.joex.fts

 import cats.effect._
 import cats.implicits._

 import docspell.backend.fulltext.CreateIndex
 import docspell.common._
 import docspell.ftsclient._
 import docspell.joex.Config
-import docspell.scheduler.Task
-import docspell.store.records.RJob
+import docspell.scheduler.{Job, Task}

 object MigrationTask {
   val taskName = Ident.unsafe("full-text-index")
@@ -38,21 +36,18 @@ object MigrationTask {
   def onCancel[F[_]]: Task[F, Unit, Unit] =
     Task.log[F, Unit](_.warn("Cancelling full-text-index task"))

-  def job[F[_]: Sync]: F[RJob] =
-    for {
-      id <- Ident.randomId[F]
-      now <- Timestamp.current[F]
-    } yield RJob.newJob(
-      id,
-      taskName,
-      DocspellSystem.taskGroup,
-      (),
-      "Create full-text index",
-      now,
-      DocspellSystem.taskGroup,
-      Priority.Low,
-      Some(DocspellSystem.migrationTaskTracker)
-    )
+  def job[F[_]: Sync]: F[Job[String]] =
+    Job
+      .createNew(
+        taskName,
+        DocspellSystem.taskGroup,
+        (),
+        "Create full-text index",
+        DocspellSystem.taskGroup,
+        Priority.Low,
+        Some(DocspellSystem.migrationTaskTracker)
+      )
+      .map(_.encode)

   def migrationTasks[F[_]: Async](fts: FtsClient[F]): F[List[Migration[F]]] =
     fts.initialize.map(_.map(fm => Migration.from(fm)))

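The same migration recurs in the files below: the hand-rolled RJob.newJob construction, which needed a fresh Ident and Timestamp at every call site, collapses into Job.createNew, and the typed job is converted for storage with .encode. Condensed before/after from the hunk above (group, subject and tracker stand in for the concrete arguments):

// Before: every call site managed ids and timestamps itself.
def jobOld[F[_]: Sync]: F[RJob] =
  for {
    id <- Ident.randomId[F]
    now <- Timestamp.current[F]
  } yield RJob.newJob(id, taskName, group, (), subject, now, group, Priority.Low, tracker)

// After: Job.createNew supplies id and timestamp; encode yields a Job[String].
def jobNew[F[_]: Sync]: F[Job[String]] =
  Job.createNew(taskName, group, (), subject, group, Priority.Low, tracker).map(_.encode)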
@@ -13,9 +13,8 @@ import docspell.common._
 import docspell.joex.Config
 import docspell.joex.filecopy.FileIntegrityCheckTask
 import docspell.scheduler.{JobTaskResultEncoder, Task}
-import docspell.store.records._
 import com.github.eikek.calev._
-import docspell.scheduler.usertask.{QUserTask, UserTaskScope}
+import docspell.scheduler.usertask.UserTask
 import io.circe.Encoder
 import io.circe.generic.semiauto.deriveEncoder

@@ -45,19 +44,15 @@ object HouseKeepingTask {
   def onCancel[F[_]]: Task[F, Unit, Unit] =
     Task.log[F, Unit](_.warn("Cancelling house-keeping task"))

-  def periodicTask[F[_]: Sync](ce: CalEvent): F[RPeriodicTask] =
-    QUserTask
-      .createJson(
-        true,
-        UserTaskScope(DocspellSystem.taskGroup),
-        taskName,
-        (),
-        "Docspell house-keeping",
-        Priority.Low,
-        ce,
-        None
-      )
-      .map(_.copy(id = periodicId))
+  def periodicTask[F[_]: Sync](ce: CalEvent): F[UserTask[Unit]] =
+    UserTask(
+      periodicId,
+      taskName,
+      true,
+      ce,
+      "Docspell house-keeping".some,
+      ()
+    ).pure[F]

   case class Result(
       checkNodes: CleanupResult,

@@ -12,20 +12,19 @@ import fs2.{Chunk, Stream}
 import docspell.backend.JobFactory
 import docspell.backend.ops.OJoex
 import docspell.common._
-import docspell.scheduler.{Context, JobQueue, Task}
+import docspell.scheduler.{Context, Job, JobStore, Task}
 import docspell.store.records.RAttachment
-import docspell.store.records.RJob

 object AllPageCountTask {

   val taskName = Ident.unsafe("all-page-count")
   type Args = Unit

-  def apply[F[_]: Sync](queue: JobQueue[F], joex: OJoex[F]): Task[F, Args, Unit] =
+  def apply[F[_]: Sync](jobStore: JobStore[F], joex: OJoex[F]): Task[F, Args, Unit] =
     Task { ctx =>
       for {
         _ <- ctx.logger.info("Generating previews for attachments")
-        n <- submitConversionJobs(ctx, queue)
+        n <- submitConversionJobs(ctx, jobStore)
         _ <- ctx.logger.info(s"Submitted $n jobs")
         _ <- joex.notifyAllNodes
       } yield ()
@@ -36,14 +35,14 @@ object AllPageCountTask {

   def submitConversionJobs[F[_]: Sync](
       ctx: Context[F, Args],
-      queue: JobQueue[F]
+      jobStore: JobStore[F]
   ): F[Int] =
     ctx.store
       .transact(findAttachments)
       .chunks
       .flatMap(createJobs[F])
       .chunks
-      .evalMap(jobs => queue.insertAllIfNew(jobs.toVector).map(_ => jobs.size))
+      .evalMap(jobs => jobStore.insertAllIfNew(jobs.toVector).map(_ => jobs.size))
       .evalTap(n => ctx.logger.debug(s"Submitted $n jobs …"))
       .compile
       .foldMonoid
@@ -51,28 +50,25 @@ object AllPageCountTask {
   private def findAttachments[F[_]] =
     RAttachment.findAllWithoutPageCount(50)

-  private def createJobs[F[_]: Sync](ras: Chunk[RAttachment]): Stream[F, RJob] = {
-    def mkJob(ra: RAttachment): F[RJob] =
+  private def createJobs[F[_]: Sync](ras: Chunk[RAttachment]): Stream[F, Job[String]] = {
+    def mkJob(ra: RAttachment): F[Job[MakePageCountArgs]] =
       JobFactory.makePageCount(MakePageCountArgs(ra.id), None)

     val jobs = ras.traverse(mkJob)
-    Stream.evalUnChunk(jobs)
+    Stream.evalUnChunk(jobs).map(_.encode)
   }

-  def job[F[_]: Sync]: F[RJob] =
-    for {
-      id <- Ident.randomId[F]
-      now <- Timestamp.current[F]
-    } yield RJob.newJob(
-      id,
-      AllPageCountTask.taskName,
-      DocspellSystem.taskGroup,
-      (),
-      "Create all page-counts",
-      now,
-      DocspellSystem.taskGroup,
-      Priority.Low,
-      Some(DocspellSystem.allPageCountTaskTracker)
-    )
+  def job[F[_]: Sync]: F[Job[String]] =
+    Job
+      .createNew(
+        AllPageCountTask.taskName,
+        DocspellSystem.taskGroup,
+        (),
+        "Create all page-counts",
+        DocspellSystem.taskGroup,
+        Priority.Low,
+        Some(DocspellSystem.allPageCountTaskTracker)
+      )
+      .map(_.encode)

 }

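submitConversionJobs here (and in the sibling tasks below) is a standard fs2 batching pipeline: stream the candidate records, build one job per record, insert chunk-wise, and sum the counts. The same shape reduced to a self-contained sketch, with the docspell types abstracted away:

import cats.effect.Sync
import cats.syntax.all._
import fs2.{Chunk, Stream}

// records -> one job per record -> chunked batch insert -> total count
def submitAll[F[_]: Sync, A, J](records: Stream[F, A])(
    mkJob: A => F[J],
    insertBatch: Vector[J] => F[Int]
): F[Int] =
  records.chunks
    .flatMap((ras: Chunk[A]) => Stream.evalUnChunk(ras.traverse(mkJob)))
    .chunks
    .evalMap(jobs => insertBatch(jobs.toVector))
    .compile
    .foldMonoid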
@@ -11,9 +11,8 @@ import cats.implicits._
 import fs2.{Chunk, Stream}
 import docspell.backend.ops.OJoex
 import docspell.common._
-import docspell.scheduler.{Context, JobQueue, Task}
+import docspell.scheduler.{Context, Job, JobStore, Task}
 import docspell.store.records.RAttachment
-import docspell.store.records._

 /* A task to find all non-converted pdf files (of a collective, or
  * all) and converting them using ocrmypdf by submitting a job for
@@ -22,11 +21,11 @@ import docspell.store.records._
 object ConvertAllPdfTask {
   type Args = ConvertAllPdfArgs

-  def apply[F[_]: Sync](queue: JobQueue[F], joex: OJoex[F]): Task[F, Args, Unit] =
+  def apply[F[_]: Sync](jobStore: JobStore[F], joex: OJoex[F]): Task[F, Args, Unit] =
     Task { ctx =>
       for {
         _ <- ctx.logger.info("Converting pdfs using ocrmypdf")
-        n <- submitConversionJobs(ctx, queue)
+        n <- submitConversionJobs(ctx, jobStore)
         _ <- ctx.logger.info(s"Submitted $n file conversion jobs")
         _ <- joex.notifyAllNodes
       } yield ()
@@ -37,40 +36,35 @@ object ConvertAllPdfTask {

   def submitConversionJobs[F[_]: Sync](
       ctx: Context[F, Args],
-      queue: JobQueue[F]
+      jobStore: JobStore[F]
   ): F[Int] =
     ctx.store
       .transact(RAttachment.findNonConvertedPdf(ctx.args.collective, 50))
       .chunks
       .flatMap(createJobs[F](ctx))
       .chunks
-      .evalMap(jobs => queue.insertAllIfNew(jobs.toVector).map(_ => jobs.size))
+      .evalMap(jobs => jobStore.insertAllIfNew(jobs.toVector).map(_ => jobs.size))
       .evalTap(n => ctx.logger.debug(s"Submitted $n jobs …"))
       .compile
       .foldMonoid

   private def createJobs[F[_]: Sync](
       ctx: Context[F, Args]
-  )(ras: Chunk[RAttachment]): Stream[F, RJob] = {
+  )(ras: Chunk[RAttachment]): Stream[F, Job[String]] = {
     val collectiveOrSystem = ctx.args.collective.getOrElse(DocspellSystem.taskGroup)

-    def mkJob(ra: RAttachment): F[RJob] =
-      for {
-        id <- Ident.randomId[F]
-        now <- Timestamp.current[F]
-      } yield RJob.newJob(
-        id,
+    def mkJob(ra: RAttachment): F[Job[PdfConvTask.Args]] =
+      Job.createNew(
         PdfConvTask.taskName,
         collectiveOrSystem,
         PdfConvTask.Args(ra.id),
         s"Convert pdf ${ra.id.id}/${ra.name.getOrElse("-")}",
-        now,
         collectiveOrSystem,
         Priority.Low,
         Some(PdfConvTask.taskName / ra.id)
       )

     val jobs = ras.traverse(mkJob)
-    Stream.evalUnChunk(jobs)
+    Stream.evalUnChunk(jobs).map(_.encode)
   }
 }

@@ -13,19 +13,18 @@ import docspell.backend.JobFactory
 import docspell.backend.ops.OJoex
 import docspell.common.MakePreviewArgs.StoreMode
 import docspell.common._
-import docspell.scheduler.{Context, JobQueue, Task}
+import docspell.scheduler.{Context, Job, JobStore, Task}
 import docspell.store.records.RAttachment
-import docspell.store.records.RJob

 object AllPreviewsTask {

   type Args = AllPreviewsArgs

-  def apply[F[_]: Sync](queue: JobQueue[F], joex: OJoex[F]): Task[F, Args, Unit] =
+  def apply[F[_]: Sync](jobStore: JobStore[F], joex: OJoex[F]): Task[F, Args, Unit] =
     Task { ctx =>
       for {
         _ <- ctx.logger.info("Generating previews for attachments")
-        n <- submitConversionJobs(ctx, queue)
+        n <- submitConversionJobs(ctx, jobStore)
         _ <- ctx.logger.info(s"Submitted $n jobs")
         _ <- joex.notifyAllNodes
       } yield ()
@@ -36,14 +35,16 @@ object AllPreviewsTask {

   def submitConversionJobs[F[_]: Sync](
       ctx: Context[F, Args],
-      queue: JobQueue[F]
+      jobStore: JobStore[F]
   ): F[Int] =
     ctx.store
       .transact(findAttachments(ctx))
       .chunks
       .flatMap(createJobs[F](ctx))
       .chunks
-      .evalMap(jobs => queue.insertAllIfNew(jobs.toVector).map(_ => jobs.size))
+      .evalMap(jobs =>
+        jobStore.insertAllIfNew(jobs.map(_.encode).toVector).map(_ => jobs.size)
+      )
       .evalTap(n => ctx.logger.debug(s"Submitted $n jobs …"))
       .compile
       .foldMonoid
@@ -58,13 +59,13 @@ object AllPreviewsTask {

   private def createJobs[F[_]: Sync](
       ctx: Context[F, Args]
-  )(ras: Chunk[RAttachment]): Stream[F, RJob] = {
+  )(ras: Chunk[RAttachment]): Stream[F, Job[MakePreviewArgs]] = {
     val collectiveOrSystem = {
       val cid = ctx.args.collective.getOrElse(DocspellSystem.taskGroup)
       AccountId(cid, DocspellSystem.user)
     }

-    def mkJob(ra: RAttachment): F[RJob] =
+    def mkJob(ra: RAttachment): F[Job[MakePreviewArgs]] =
       JobFactory.makePreview(
         MakePreviewArgs(ra.id, ctx.args.storeMode),
         collectiveOrSystem.some
@@ -74,7 +75,10 @@ object AllPreviewsTask {
     Stream.evalUnChunk(jobs)
   }

-  def job[F[_]: Sync](storeMode: MakePreviewArgs.StoreMode, cid: Option[Ident]): F[RJob] =
-    JobFactory.allPreviews(AllPreviewsArgs(cid, storeMode), None)
+  def job[F[_]: Sync](
+      storeMode: MakePreviewArgs.StoreMode,
+      cid: Option[Ident]
+  ): F[Job[String]] =
+    JobFactory.allPreviews(AllPreviewsArgs(cid, storeMode), None).map(_.encode)

 }

@@ -65,7 +65,7 @@ object ItemHandler {
         .map(_ => data)
     )

-  def isLastRetry[F[_]: Sync]: Task[F, Args, Boolean] =
+  def isLastRetry[F[_]]: Task[F, Args, Boolean] =
     Task(_.isLastRetry)

   def safeProcess[F[_]: Async](

@@ -141,7 +141,7 @@ object ReProcessItem {
     lang1.orElse(lang2).getOrElse(Language.German)
   }

-  def isLastRetry[F[_]: Sync]: Task[F, Args, Boolean] =
+  def isLastRetry[F[_]]: Task[F, Args, Boolean] =
     Task(_.isLastRetry)

   def safeProcess[F[_]: Async](

@@ -12,8 +12,7 @@ import cats.implicits._
 import docspell.common._
 import docspell.scheduler.Context
 import docspell.scheduler.Task
-import docspell.scheduler.usertask.{UserTask, UserTaskScope}
-import docspell.store.records.RPeriodicTask
+import docspell.scheduler.usertask.UserTask
 import docspell.store.records.RUserEmail
 import emil._

@@ -25,18 +24,15 @@ object UpdateCheckTask {
   def onCancel[F[_]]: Task[F, Args, Unit] =
     Task.log(_.warn("Cancelling update-check task"))

-  def periodicTask[F[_]: Sync](cfg: UpdateCheckConfig): F[RPeriodicTask] =
+  def periodicTask[F[_]: Sync](cfg: UpdateCheckConfig): F[UserTask[Unit]] =
     UserTask(
       Ident.unsafe("docspell-update-check"),
       taskName,
       cfg.enabled,
       cfg.schedule,
-      None,
+      "Docspell Update Check".some,
       ()
-    ).encode.toPeriodicTask(
-      UserTaskScope(cfg.senderAccount.collective),
-      "Docspell Update Check".some
-    )
+    ).pure[F]

   def apply[F[_]: Async](
       cfg: UpdateCheckConfig,

@@ -1,22 +0,0 @@
-/*
- * Copyright 2020 Eike K. & Contributors
- *
- * SPDX-License-Identifier: AGPL-3.0-or-later
- */
-
-package docspell.joex.scheduler
-
-import docspell.common.Priority
-
-import munit._
-
-class CountingSchemeSpec extends FunSuite {
-
-  test("counting") {
-    val cs = CountingScheme(2, 1)
-    val list = List.iterate(cs.nextPriority, 6)(_._1.nextPriority).map(_._2)
-    val expect = List(Priority.High, Priority.High, Priority.Low)
-    assertEquals(list, expect ++ expect)
-  }
-
-}