Refactor scheduler into api / impl

This commit is contained in:
eikek 2022-03-13 14:27:06 +01:00
parent 69765f05ff
commit 3a05dc56cb
50 changed files with 1076 additions and 867 deletions

View File

@ -530,7 +530,7 @@ val schedulerApi = project
Dependencies.fs2Core ++ Dependencies.fs2Core ++
Dependencies.circeCore Dependencies.circeCore
) )
.dependsOn(loggingApi, common, store, notificationApi, pubsubApi) .dependsOn(loggingApi, common, store, pubsubApi)
val schedulerImpl = project val schedulerImpl = project
.in(file("modules/scheduler/impl")) .in(file("modules/scheduler/impl"))
@ -837,7 +837,8 @@ val restserver = project
ftssolr, ftssolr,
oidc, oidc,
pubsubNaive, pubsubNaive,
notificationImpl notificationImpl,
schedulerImpl
) )
// --- Website Documentation // --- Website Documentation

View File

@ -14,8 +14,7 @@ import docspell.backend.signup.OSignup
import docspell.ftsclient.FtsClient import docspell.ftsclient.FtsClient
import docspell.notification.api.{EventExchange, NotificationModule} import docspell.notification.api.{EventExchange, NotificationModule}
import docspell.pubsub.api.PubSubT import docspell.pubsub.api.PubSubT
import docspell.scheduler.msg.JobQueuePublish import docspell.scheduler.JobStoreModule
import docspell.scheduler.usertask.UserTaskStore
import docspell.store.Store import docspell.store.Store
import docspell.totp.Totp import docspell.totp.Totp
import emil.Emil import emil.Emil
@ -58,29 +57,43 @@ object BackendApp {
javaEmil: Emil[F], javaEmil: Emil[F],
ftsClient: FtsClient[F], ftsClient: FtsClient[F],
pubSubT: PubSubT[F], pubSubT: PubSubT[F],
schedulerModule: JobStoreModule[F],
notificationMod: NotificationModule[F] notificationMod: NotificationModule[F]
): Resource[F, BackendApp[F]] = ): Resource[F, BackendApp[F]] =
for { for {
utStore <- UserTaskStore(store)
queue <- JobQueuePublish(store, pubSubT, notificationMod)
totpImpl <- OTotp(store, Totp.default) totpImpl <- OTotp(store, Totp.default)
loginImpl <- Login[F](store, Totp.default) loginImpl <- Login[F](store, Totp.default)
signupImpl <- OSignup[F](store) signupImpl <- OSignup[F](store)
joexImpl <- OJoex(pubSubT) joexImpl <- OJoex(pubSubT)
collImpl <- OCollective[F](store, utStore, queue, joexImpl) collImpl <- OCollective[F](
store,
schedulerModule.userTasks,
schedulerModule.jobs,
joexImpl
)
sourceImpl <- OSource[F](store) sourceImpl <- OSource[F](store)
tagImpl <- OTag[F](store) tagImpl <- OTag[F](store)
equipImpl <- OEquipment[F](store) equipImpl <- OEquipment[F](store)
orgImpl <- OOrganization(store) orgImpl <- OOrganization(store)
uploadImpl <- OUpload(store, queue, joexImpl) uploadImpl <- OUpload(store, schedulerModule.jobs, joexImpl)
nodeImpl <- ONode(store) nodeImpl <- ONode(store)
jobImpl <- OJob(store, joexImpl, pubSubT) jobImpl <- OJob(store, joexImpl, pubSubT)
createIndex <- CreateIndex.resource(ftsClient, store) createIndex <- CreateIndex.resource(ftsClient, store)
itemImpl <- OItem(store, ftsClient, createIndex, queue, joexImpl) itemImpl <- OItem(store, ftsClient, createIndex, schedulerModule.jobs, joexImpl)
itemSearchImpl <- OItemSearch(store) itemSearchImpl <- OItemSearch(store)
fulltextImpl <- OFulltext(itemSearchImpl, ftsClient, store, queue, joexImpl) fulltextImpl <- OFulltext(
itemSearchImpl,
ftsClient,
store,
schedulerModule.jobs,
joexImpl
)
mailImpl <- OMail(store, javaEmil) mailImpl <- OMail(store, javaEmil)
userTaskImpl <- OUserTask(utStore, store, queue, joexImpl) userTaskImpl <- OUserTask(
schedulerModule.userTasks,
store,
joexImpl
)
folderImpl <- OFolder(store) folderImpl <- OFolder(store)
customFieldsImpl <- OCustomFields(store) customFieldsImpl <- OCustomFields(store)
simpleSearchImpl = OSimpleSearch(fulltextImpl, itemSearchImpl) simpleSearchImpl = OSimpleSearch(fulltextImpl, itemSearchImpl)
@ -90,7 +103,7 @@ object BackendApp {
) )
notifyImpl <- ONotification(store, notificationMod) notifyImpl <- ONotification(store, notificationMod)
bookmarksImpl <- OQueryBookmarks(store) bookmarksImpl <- OQueryBookmarks(store)
fileRepoImpl <- OFileRepository(store, queue, joexImpl) fileRepoImpl <- OFileRepository(store, schedulerModule.jobs, joexImpl)
} yield new BackendApp[F] { } yield new BackendApp[F] {
val pubSub = pubSubT val pubSub = pubSubT
val login = loginImpl val login = loginImpl

View File

@ -8,124 +8,91 @@ package docspell.backend
import cats.effect._ import cats.effect._
import cats.implicits._ import cats.implicits._
import docspell.backend.MailAddressCodec import docspell.backend.MailAddressCodec
import docspell.common._ import docspell.common._
import docspell.notification.api.PeriodicQueryArgs import docspell.notification.api.PeriodicQueryArgs
import docspell.store.records.RJob import docspell.scheduler.Job
object JobFactory extends MailAddressCodec { object JobFactory extends MailAddressCodec {
def integrityCheck[F[_]: Sync]( def integrityCheck[F[_]: Sync](
args: FileIntegrityCheckArgs, args: FileIntegrityCheckArgs,
submitter: AccountId = DocspellSystem.account submitter: AccountId = DocspellSystem.account
): F[RJob] = ): F[Job[FileIntegrityCheckArgs]] =
for { Job.createNew(
id <- Ident.randomId[F]
now <- Timestamp.current[F]
job = RJob.newJob(
id,
FileIntegrityCheckArgs.taskName, FileIntegrityCheckArgs.taskName,
submitter.collective, submitter.collective,
args, args,
s"Check integrity of files", s"Check integrity of files",
now,
submitter.user, submitter.user,
Priority.High, Priority.High,
Some(FileIntegrityCheckArgs.taskName) Some(FileIntegrityCheckArgs.taskName)
) )
} yield job
def fileCopy[F[_]: Sync]( def fileCopy[F[_]: Sync](
args: FileCopyTaskArgs, args: FileCopyTaskArgs,
submitter: AccountId = DocspellSystem.account submitter: AccountId = DocspellSystem.account
): F[RJob] = ): F[Job[FileCopyTaskArgs]] =
for { Job.createNew(
id <- Ident.randomId[F]
now <- Timestamp.current[F]
job = RJob.newJob(
id,
FileCopyTaskArgs.taskName, FileCopyTaskArgs.taskName,
submitter.collective, submitter.collective,
args, args,
s"Copying all files", "Copying all files",
now,
submitter.user, submitter.user,
Priority.High, Priority.High,
Some(FileCopyTaskArgs.taskName) Some(FileCopyTaskArgs.taskName)
) )
} yield job
def periodicQuery[F[_]: Sync](args: PeriodicQueryArgs, submitter: AccountId): F[RJob] = def periodicQuery[F[_]: Sync](
for { args: PeriodicQueryArgs,
id <- Ident.randomId[F] submitter: AccountId
now <- Timestamp.current[F] ): F[Job[PeriodicQueryArgs]] =
job = RJob.newJob( Job.createNew(
id,
PeriodicQueryArgs.taskName, PeriodicQueryArgs.taskName,
submitter.collective, submitter.collective,
args, args,
s"Running periodic query, notify via ${args.channels.map(_.channelType)}", s"Running periodic query, notify via ${args.channels.map(_.channelType)}",
now,
submitter.user, submitter.user,
Priority.Low, Priority.Low,
None None
) )
} yield job
def makePageCount[F[_]: Sync]( def makePageCount[F[_]: Sync](
args: MakePageCountArgs, args: MakePageCountArgs,
account: Option[AccountId] account: Option[AccountId]
): F[RJob] = ): F[Job[MakePageCountArgs]] =
for { Job.createNew(
id <- Ident.randomId[F]
now <- Timestamp.current[F]
job = RJob.newJob(
id,
MakePageCountArgs.taskName, MakePageCountArgs.taskName,
account.map(_.collective).getOrElse(DocspellSystem.taskGroup), account.map(_.collective).getOrElse(DocspellSystem.taskGroup),
args, args,
s"Find page-count metadata for ${args.attachment.id}", s"Find page-count metadata for ${args.attachment.id}",
now,
account.map(_.user).getOrElse(DocspellSystem.user), account.map(_.user).getOrElse(DocspellSystem.user),
Priority.Low, Priority.Low,
Some(MakePageCountArgs.taskName / args.attachment) Some(MakePageCountArgs.taskName / args.attachment)
) )
} yield job
def makePreview[F[_]: Sync]( def makePreview[F[_]: Sync](
args: MakePreviewArgs, args: MakePreviewArgs,
account: Option[AccountId] account: Option[AccountId]
): F[RJob] = ): F[Job[MakePreviewArgs]] =
for { Job.createNew(
id <- Ident.randomId[F]
now <- Timestamp.current[F]
job = RJob.newJob(
id,
MakePreviewArgs.taskName, MakePreviewArgs.taskName,
account.map(_.collective).getOrElse(DocspellSystem.taskGroup), account.map(_.collective).getOrElse(DocspellSystem.taskGroup),
args, args,
s"Generate preview image", s"Generate preview image",
now,
account.map(_.user).getOrElse(DocspellSystem.user), account.map(_.user).getOrElse(DocspellSystem.user),
Priority.Low, Priority.Low,
Some(MakePreviewArgs.taskName / args.attachment) Some(MakePreviewArgs.taskName / args.attachment)
) )
} yield job
def allPreviews[F[_]: Sync]( def allPreviews[F[_]: Sync](
args: AllPreviewsArgs, args: AllPreviewsArgs,
submitter: Option[Ident] submitter: Option[Ident]
): F[RJob] = ): F[Job[AllPreviewsArgs]] =
for { Job.createNew(
id <- Ident.randomId[F]
now <- Timestamp.current[F]
} yield RJob.newJob(
id,
AllPreviewsArgs.taskName, AllPreviewsArgs.taskName,
args.collective.getOrElse(DocspellSystem.taskGroup), args.collective.getOrElse(DocspellSystem.taskGroup),
args, args,
"Create preview images", "Create preview images",
now,
submitter.getOrElse(DocspellSystem.user), submitter.getOrElse(DocspellSystem.user),
Priority.Low, Priority.Low,
Some(DocspellSystem.allPreviewTaskTracker) Some(DocspellSystem.allPreviewTaskTracker)
@ -135,127 +102,91 @@ object JobFactory extends MailAddressCodec {
collective: Option[Ident], collective: Option[Ident],
submitter: Option[Ident], submitter: Option[Ident],
prio: Priority prio: Priority
): F[RJob] = ): F[Job[ConvertAllPdfArgs]] =
for { Job.createNew(
id <- Ident.randomId[F]
now <- Timestamp.current[F]
job = RJob.newJob(
id,
ConvertAllPdfArgs.taskName, ConvertAllPdfArgs.taskName,
collective.getOrElse(DocspellSystem.taskGroup), collective.getOrElse(DocspellSystem.taskGroup),
ConvertAllPdfArgs(collective), ConvertAllPdfArgs(collective),
s"Convert all pdfs not yet converted", s"Convert all pdfs not yet converted",
now,
submitter.getOrElse(DocspellSystem.user), submitter.getOrElse(DocspellSystem.user),
prio, prio,
collective collective
.map(c => c / ConvertAllPdfArgs.taskName) .map(c => c / ConvertAllPdfArgs.taskName)
.orElse(ConvertAllPdfArgs.taskName.some) .orElse(ConvertAllPdfArgs.taskName.some)
) )
} yield job
def reprocessItem[F[_]: Sync]( def reprocessItem[F[_]: Sync](
args: ReProcessItemArgs, args: ReProcessItemArgs,
account: AccountId, account: AccountId,
prio: Priority prio: Priority
): F[RJob] = ): F[Job[ReProcessItemArgs]] =
for { Job.createNew(
id <- Ident.randomId[F]
now <- Timestamp.current[F]
job = RJob.newJob(
id,
ReProcessItemArgs.taskName, ReProcessItemArgs.taskName,
account.collective, account.collective,
args, args,
s"Re-process files of item ${args.itemId.id}", s"Re-process files of item ${args.itemId.id}",
now,
account.user, account.user,
prio, prio,
Some(ReProcessItemArgs.taskName / args.itemId) Some(ReProcessItemArgs.taskName / args.itemId)
) )
} yield job
def processItem[F[_]: Sync]( def processItem[F[_]: Sync](
args: ProcessItemArgs, args: ProcessItemArgs,
account: AccountId, account: AccountId,
prio: Priority, prio: Priority,
tracker: Option[Ident] tracker: Option[Ident]
): F[RJob] = ): F[Job[ProcessItemArgs]] =
for { Job.createNew(
id <- Ident.randomId[F]
now <- Timestamp.current[F]
job = RJob.newJob(
id,
ProcessItemArgs.taskName, ProcessItemArgs.taskName,
account.collective, account.collective,
args, args,
args.makeSubject, args.makeSubject,
now,
account.user, account.user,
prio, prio,
tracker tracker
) )
} yield job
def processItems[F[_]: Sync]( def processItems[F[_]: Sync](
args: Vector[ProcessItemArgs], args: Vector[ProcessItemArgs],
account: AccountId, account: AccountId,
prio: Priority, prio: Priority,
tracker: Option[Ident] tracker: Option[Ident]
): F[Vector[RJob]] = { ): F[Vector[Job[ProcessItemArgs]]] = {
def create(now: Timestamp, arg: ProcessItemArgs): F[RJob] = def create(arg: ProcessItemArgs): F[Job[ProcessItemArgs]] =
Ident Job.createNew(
.randomId[F]
.map(id =>
RJob.newJob(
id,
ProcessItemArgs.taskName, ProcessItemArgs.taskName,
account.collective, account.collective,
arg, arg,
arg.makeSubject, arg.makeSubject,
now,
account.user, account.user,
prio, prio,
tracker tracker
) )
)
for { args.traverse(create)
now <- Timestamp.current[F]
jobs <- args.traverse(a => create(now, a))
} yield jobs
} }
def reIndexAll[F[_]: Sync]: F[RJob] = def reIndexAll[F[_]: Sync]: F[Job[ReIndexTaskArgs]] =
for { Job.createNew(
id <- Ident.randomId[F]
now <- Timestamp.current[F]
} yield RJob.newJob(
id,
ReIndexTaskArgs.taskName, ReIndexTaskArgs.taskName,
DocspellSystem.taskGroup, DocspellSystem.taskGroup,
ReIndexTaskArgs(None), ReIndexTaskArgs(None),
s"Recreate full-text index", "Recreate full-text index",
now,
DocspellSystem.taskGroup, DocspellSystem.taskGroup,
Priority.Low, Priority.Low,
Some(DocspellSystem.migrationTaskTracker) Some(DocspellSystem.migrationTaskTracker)
) )
def reIndex[F[_]: Sync](account: AccountId): F[RJob] = def reIndex[F[_]: Sync](account: AccountId): F[Job[ReIndexTaskArgs]] = {
for { val args = ReIndexTaskArgs(Some(account.collective))
id <- Ident.randomId[F] Job.createNew(
now <- Timestamp.current[F]
args = ReIndexTaskArgs(Some(account.collective))
} yield RJob.newJob(
id,
ReIndexTaskArgs.taskName, ReIndexTaskArgs.taskName,
account.collective, account.collective,
args, args,
s"Recreate full-text index", "Recreate full-text index",
now,
account.user, account.user,
Priority.Low, Priority.Low,
Some(ReIndexTaskArgs.tracker(args)) Some(ReIndexTaskArgs.tracker(args))
) )
}
} }

View File

@ -18,7 +18,7 @@ import docspell.store.queries.{QCollective, QUser}
import docspell.store.records._ import docspell.store.records._
import docspell.store.{AddResult, Store} import docspell.store.{AddResult, Store}
import com.github.eikek.calev._ import com.github.eikek.calev._
import docspell.scheduler.JobQueue import docspell.scheduler.JobStore
import docspell.scheduler.usertask.{UserTask, UserTaskScope, UserTaskStore} import docspell.scheduler.usertask.{UserTask, UserTaskScope, UserTaskStore}
trait OCollective[F[_]] { trait OCollective[F[_]] {
@ -131,7 +131,7 @@ object OCollective {
def apply[F[_]: Async]( def apply[F[_]: Async](
store: Store[F], store: Store[F],
uts: UserTaskStore[F], uts: UserTaskStore[F],
queue: JobQueue[F], jobStore: JobStore[F],
joex: OJoex[F] joex: OJoex[F]
): Resource[F, OCollective[F]] = ): Resource[F, OCollective[F]] =
Resource.pure[F, OCollective[F]](new OCollective[F] { Resource.pure[F, OCollective[F]](new OCollective[F] {
@ -194,32 +194,32 @@ object OCollective {
for { for {
id <- Ident.randomId[F] id <- Ident.randomId[F]
args = LearnClassifierArgs(collective) args = LearnClassifierArgs(collective)
ut <- UserTask( ut = UserTask(
id, id,
LearnClassifierArgs.taskName, LearnClassifierArgs.taskName,
true, true,
CalEvent(WeekdayComponent.All, DateEvent.All, TimeEvent.All), CalEvent(WeekdayComponent.All, DateEvent.All, TimeEvent.All),
None, None,
args args
).encode.toPeriodicTask(UserTaskScope(collective), args.makeSubject.some) )
job <- ut.toJob _ <- uts
_ <- queue.insert(job) .updateOneTask(UserTaskScope(collective), args.makeSubject.some, ut)
_ <- joex.notifyAllNodes _ <- joex.notifyAllNodes
} yield () } yield ()
def startEmptyTrash(args: EmptyTrashArgs): F[Unit] = def startEmptyTrash(args: EmptyTrashArgs): F[Unit] =
for { for {
id <- Ident.randomId[F] id <- Ident.randomId[F]
ut <- UserTask( ut = UserTask(
id, id,
EmptyTrashArgs.taskName, EmptyTrashArgs.taskName,
true, true,
CalEvent(WeekdayComponent.All, DateEvent.All, TimeEvent.All), CalEvent(WeekdayComponent.All, DateEvent.All, TimeEvent.All),
None, None,
args args
).encode.toPeriodicTask(UserTaskScope(args.collective), args.makeSubject.some) )
job <- ut.toJob _ <- uts
_ <- queue.insert(job) .updateOneTask(UserTaskScope(args.collective), args.makeSubject.some, ut)
_ <- joex.notifyAllNodes _ <- joex.notifyAllNodes
} yield () } yield ()
@ -319,7 +319,7 @@ object OCollective {
AllPreviewsArgs(Some(account.collective), storeMode), AllPreviewsArgs(Some(account.collective), storeMode),
Some(account.user) Some(account.user)
) )
_ <- queue.insertIfNew(job) _ <- jobStore.insertIfNew(job.encode)
_ <- if (notifyJoex) joex.notifyAllNodes else ().pure[F] _ <- if (notifyJoex) joex.notifyAllNodes else ().pure[F]
} yield UpdateResult.success } yield UpdateResult.success

View File

@ -12,17 +12,22 @@ import cats.implicits._
import docspell.backend.JobFactory import docspell.backend.JobFactory
import docspell.backend.ops.OFileRepository.IntegrityResult import docspell.backend.ops.OFileRepository.IntegrityResult
import docspell.common._ import docspell.common._
import docspell.scheduler.JobQueue import docspell.scheduler.{Job, JobStore}
import docspell.store.Store import docspell.store.Store
import docspell.store.records.RJob
import scodec.bits.ByteVector import scodec.bits.ByteVector
trait OFileRepository[F[_]] { trait OFileRepository[F[_]] {
/** Inserts the job or return None if such a job already is running. */ /** Inserts the job or return None if such a job already is running. */
def cloneFileRepository(args: FileCopyTaskArgs, notifyJoex: Boolean): F[Option[RJob]] def cloneFileRepository(
args: FileCopyTaskArgs,
notifyJoex: Boolean
): F[Option[Job[FileCopyTaskArgs]]]
def checkIntegrityAll(part: FileKeyPart, notifyJoex: Boolean): F[Option[RJob]] def checkIntegrityAll(
part: FileKeyPart,
notifyJoex: Boolean
): F[Option[Job[FileIntegrityCheckArgs]]]
def checkIntegrity(key: FileKey, hash: Option[ByteVector]): F[Option[IntegrityResult]] def checkIntegrity(key: FileKey, hash: Option[ByteVector]): F[Option[IntegrityResult]]
} }
@ -33,7 +38,7 @@ object OFileRepository {
def apply[F[_]: Async]( def apply[F[_]: Async](
store: Store[F], store: Store[F],
queue: JobQueue[F], jobStore: JobStore[F],
joex: OJoex[F] joex: OJoex[F]
): Resource[F, OFileRepository[F]] = ): Resource[F, OFileRepository[F]] =
Resource.pure(new OFileRepository[F] { Resource.pure(new OFileRepository[F] {
@ -42,17 +47,20 @@ object OFileRepository {
def cloneFileRepository( def cloneFileRepository(
args: FileCopyTaskArgs, args: FileCopyTaskArgs,
notifyJoex: Boolean notifyJoex: Boolean
): F[Option[RJob]] = ): F[Option[Job[FileCopyTaskArgs]]] =
for { for {
job <- JobFactory.fileCopy(args) job <- JobFactory.fileCopy(args)
flag <- queue.insertIfNew(job) flag <- jobStore.insertIfNew(job.encode)
_ <- if (notifyJoex) joex.notifyAllNodes else ().pure[F] _ <- if (notifyJoex) joex.notifyAllNodes else ().pure[F]
} yield Option.when(flag)(job) } yield Option.when(flag)(job)
def checkIntegrityAll(part: FileKeyPart, notifyJoex: Boolean): F[Option[RJob]] = def checkIntegrityAll(
part: FileKeyPart,
notifyJoex: Boolean
): F[Option[Job[FileIntegrityCheckArgs]]] =
for { for {
job <- JobFactory.integrityCheck(FileIntegrityCheckArgs(part)) job <- JobFactory.integrityCheck(FileIntegrityCheckArgs(part))
flag <- queue.insertIfNew(job) flag <- jobStore.insertIfNew(job.encode)
_ <- if (notifyJoex) joex.notifyAllNodes else ().pure[F] _ <- if (notifyJoex) joex.notifyAllNodes else ().pure[F]
} yield Option.when(flag)(job) } yield Option.when(flag)(job)

View File

@ -16,7 +16,7 @@ import docspell.common._
import docspell.ftsclient._ import docspell.ftsclient._
import docspell.query.ItemQuery._ import docspell.query.ItemQuery._
import docspell.query.ItemQueryDsl._ import docspell.query.ItemQueryDsl._
import docspell.scheduler.JobQueue import docspell.scheduler.JobStore
import docspell.store.queries.{QFolder, QItem, SelectedItem} import docspell.store.queries.{QFolder, QItem, SelectedItem}
import docspell.store.records.RJob import docspell.store.records.RJob
import docspell.store.{Store, qb} import docspell.store.{Store, qb}
@ -80,7 +80,7 @@ object OFulltext {
itemSearch: OItemSearch[F], itemSearch: OItemSearch[F],
fts: FtsClient[F], fts: FtsClient[F],
store: Store[F], store: Store[F],
queue: JobQueue[F], jobStore: JobStore[F],
joex: OJoex[F] joex: OJoex[F]
): Resource[F, OFulltext[F]] = ): Resource[F, OFulltext[F]] =
Resource.pure[F, OFulltext[F]](new OFulltext[F] { Resource.pure[F, OFulltext[F]](new OFulltext[F] {
@ -89,7 +89,7 @@ object OFulltext {
for { for {
_ <- logger.info(s"Re-index all.") _ <- logger.info(s"Re-index all.")
job <- JobFactory.reIndexAll[F] job <- JobFactory.reIndexAll[F]
_ <- queue.insertIfNew(job) *> joex.notifyAllNodes _ <- jobStore.insertIfNew(job.encode) *> joex.notifyAllNodes
} yield () } yield ()
def reindexCollective(account: AccountId): F[Unit] = def reindexCollective(account: AccountId): F[Unit] =
@ -101,7 +101,7 @@ object OFulltext {
job <- JobFactory.reIndex(account) job <- JobFactory.reIndex(account)
_ <- _ <-
if (exist.isDefined) ().pure[F] if (exist.isDefined) ().pure[F]
else queue.insertIfNew(job) *> joex.notifyAllNodes else jobStore.insertIfNew(job.encode) *> joex.notifyAllNodes
} yield () } yield ()
def findIndexOnly(maxNoteLen: Int)( def findIndexOnly(maxNoteLen: Int)(
@ -323,9 +323,7 @@ object OFulltext {
def apply[A](implicit ev: ItemId[A]): ItemId[A] = ev def apply[A](implicit ev: ItemId[A]): ItemId[A] = ev
def from[A](f: A => Ident): ItemId[A] = def from[A](f: A => Ident): ItemId[A] =
new ItemId[A] { (a: A) => f(a)
def itemId(a: A) = f(a)
}
implicit val listItemId: ItemId[ListItem] = implicit val listItemId: ItemId[ListItem] =
ItemId.from(_.id) ItemId.from(_.id)

View File

@ -17,7 +17,7 @@ import docspell.common._
import docspell.ftsclient.FtsClient import docspell.ftsclient.FtsClient
import docspell.logging.Logger import docspell.logging.Logger
import docspell.notification.api.Event import docspell.notification.api.Event
import docspell.scheduler.JobQueue import docspell.scheduler.JobStore
import docspell.store.queries.{QAttachment, QItem, QMoveAttachment} import docspell.store.queries.{QAttachment, QItem, QMoveAttachment}
import docspell.store.records._ import docspell.store.records._
import docspell.store.{AddResult, Store, UpdateResult} import docspell.store.{AddResult, Store, UpdateResult}
@ -226,7 +226,7 @@ object OItem {
store: Store[F], store: Store[F],
fts: FtsClient[F], fts: FtsClient[F],
createIndex: CreateIndex[F], createIndex: CreateIndex[F],
queue: JobQueue[F], jobStore: JobStore[F],
joex: OJoex[F] joex: OJoex[F]
): Resource[F, OItem[F]] = ): Resource[F, OItem[F]] =
for { for {
@ -286,7 +286,7 @@ object OItem {
) )
ev = Event.TagsChanged.partial( ev = Event.TagsChanged.partial(
itemIds, itemIds,
added.toList.flatten.map(_.id).toList, added.toList.flatten.map(_.id),
Nil Nil
) )
} yield AttachedEvent(UpdateResult.success)(ev)) } yield AttachedEvent(UpdateResult.success)(ev))
@ -761,7 +761,7 @@ object OItem {
job <- OptionT.liftF( job <- OptionT.liftF(
JobFactory.reprocessItem[F](args, account, Priority.Low) JobFactory.reprocessItem[F](args, account, Priority.Low)
) )
_ <- OptionT.liftF(queue.insertIfNew(job)) _ <- OptionT.liftF(jobStore.insertIfNew(job.encode))
_ <- OptionT.liftF(if (notifyJoex) joex.notifyAllNodes else ().pure[F]) _ <- OptionT.liftF(if (notifyJoex) joex.notifyAllNodes else ().pure[F])
} yield UpdateResult.success).getOrElse(UpdateResult.notFound) } yield UpdateResult.success).getOrElse(UpdateResult.notFound)
@ -775,7 +775,8 @@ object OItem {
jobs <- items jobs <- items
.map(item => ReProcessItemArgs(item, Nil)) .map(item => ReProcessItemArgs(item, Nil))
.traverse(arg => JobFactory.reprocessItem[F](arg, account, Priority.Low)) .traverse(arg => JobFactory.reprocessItem[F](arg, account, Priority.Low))
_ <- queue.insertAllIfNew(jobs) .map(_.map(_.encode))
_ <- jobStore.insertAllIfNew(jobs)
_ <- if (notifyJoex) joex.notifyAllNodes else ().pure[F] _ <- if (notifyJoex) joex.notifyAllNodes else ().pure[F]
} yield items.size) } yield items.size)
@ -786,7 +787,7 @@ object OItem {
): F[UpdateResult] = ): F[UpdateResult] =
for { for {
job <- JobFactory.convertAllPdfs[F](collective, submitter, Priority.Low) job <- JobFactory.convertAllPdfs[F](collective, submitter, Priority.Low)
_ <- queue.insertIfNew(job) _ <- jobStore.insertIfNew(job.encode)
_ <- if (notifyJoex) joex.notifyAllNodes else ().pure[F] _ <- if (notifyJoex) joex.notifyAllNodes else ().pure[F]
} yield UpdateResult.success } yield UpdateResult.success
@ -797,7 +798,7 @@ object OItem {
): F[UpdateResult] = ): F[UpdateResult] =
for { for {
job <- JobFactory.makePreview[F](args, account.some) job <- JobFactory.makePreview[F](args, account.some)
_ <- queue.insertIfNew(job) _ <- jobStore.insertIfNew(job.encode)
_ <- if (notifyJoex) joex.notifyAllNodes else ().pure[F] _ <- if (notifyJoex) joex.notifyAllNodes else ().pure[F]
} yield UpdateResult.success } yield UpdateResult.success
@ -807,7 +808,7 @@ object OItem {
): F[UpdateResult] = ): F[UpdateResult] =
for { for {
job <- JobFactory.allPreviews[F](AllPreviewsArgs(None, storeMode), None) job <- JobFactory.allPreviews[F](AllPreviewsArgs(None, storeMode), None)
_ <- queue.insertIfNew(job) _ <- jobStore.insertIfNew(job.encode)
_ <- if (notifyJoex) joex.notifyAllNodes else ().pure[F] _ <- if (notifyJoex) joex.notifyAllNodes else ().pure[F]
} yield UpdateResult.success } yield UpdateResult.success

View File

@ -13,7 +13,7 @@ import cats.implicits._
import fs2.Stream import fs2.Stream
import docspell.backend.JobFactory import docspell.backend.JobFactory
import docspell.common._ import docspell.common._
import docspell.scheduler.JobQueue import docspell.scheduler.{Job, JobStore}
import docspell.store.Store import docspell.store.Store
import docspell.store.records._ import docspell.store.records._
@ -107,7 +107,7 @@ object OUpload {
def apply[F[_]: Sync]( def apply[F[_]: Sync](
store: Store[F], store: Store[F],
queue: JobQueue[F], jobStore: JobStore[F],
joex: OJoex[F] joex: OJoex[F]
): Resource[F, OUpload[F]] = ): Resource[F, OUpload[F]] =
Resource.pure[F, OUpload[F]](new OUpload[F] { Resource.pure[F, OUpload[F]](new OUpload[F] {
@ -186,10 +186,10 @@ object OUpload {
private def submitJobs( private def submitJobs(
notifyJoex: Boolean notifyJoex: Boolean
)(jobs: Vector[RJob]): F[OUpload.UploadResult] = )(jobs: Vector[Job[String]]): F[OUpload.UploadResult] =
for { for {
_ <- logger.debug(s"Storing jobs: $jobs") _ <- logger.debug(s"Storing jobs: $jobs")
_ <- queue.insertAll(jobs) _ <- jobStore.insertAll(jobs)
_ <- if (notifyJoex) joex.notifyAllNodes else ().pure[F] _ <- if (notifyJoex) joex.notifyAllNodes else ().pure[F]
} yield UploadResult.Success } yield UploadResult.Success
@ -243,7 +243,9 @@ object OUpload {
account: AccountId, account: AccountId,
prio: Priority, prio: Priority,
tracker: Option[Ident] tracker: Option[Ident]
): F[Vector[RJob]] = ): F[Vector[Job[String]]] =
JobFactory.processItems[F](args, account, prio, tracker) JobFactory
.processItems[F](args, account, prio, tracker)
.map(_.map(_.encode))
}) })
} }

View File

@ -12,7 +12,6 @@ import cats.implicits._
import fs2.Stream import fs2.Stream
import docspell.common._ import docspell.common._
import docspell.notification.api.{ChannelRef, PeriodicDueItemsArgs, PeriodicQueryArgs} import docspell.notification.api.{ChannelRef, PeriodicDueItemsArgs, PeriodicQueryArgs}
import docspell.scheduler.JobQueue
import docspell.scheduler.usertask.{UserTask, UserTaskScope, UserTaskStore} import docspell.scheduler.usertask.{UserTask, UserTaskScope, UserTaskStore}
import docspell.store.Store import docspell.store.Store
import docspell.store.records.RNotificationChannel import docspell.store.records.RNotificationChannel
@ -84,7 +83,6 @@ object OUserTask {
def apply[F[_]: Async]( def apply[F[_]: Async](
taskStore: UserTaskStore[F], taskStore: UserTaskStore[F],
store: Store[F], store: Store[F],
queue: JobQueue[F],
joex: OJoex[F] joex: OJoex[F]
): Resource[F, OUserTask[F]] = ): Resource[F, OUserTask[F]] =
Resource.pure[F, OUserTask[F]](new OUserTask[F] { Resource.pure[F, OUserTask[F]](new OUserTask[F] {
@ -93,10 +91,8 @@ object OUserTask {
implicit E: Encoder[A] implicit E: Encoder[A]
): F[Unit] = ): F[Unit] =
for { for {
ptask <- task.encode.toPeriodicTask(scope, subject) _ <- taskStore.executeNow(scope, subject, task)
job <- ptask.toJob _ <- joex.notifyAllNodes
_ <- queue.insert(job)
_ <- joex.notifyPeriodicTasks
} yield () } yield ()
def getScanMailbox(scope: UserTaskScope): Stream[F, UserTask[ScanMailboxArgs]] = def getScanMailbox(scope: UserTaskScope): Stream[F, UserTask[ScanMailboxArgs]] =

View File

@ -6,10 +6,8 @@
package docspell.common.syntax package docspell.common.syntax
import cats.implicits._
import io.circe.Decoder import io.circe.Decoder
import io.circe.parser._ import io.circe.parser
trait StringSyntax { trait StringSyntax {
implicit class EvenMoreStringOps(s: String) { implicit class EvenMoreStringOps(s: String) {
@ -18,9 +16,8 @@ trait StringSyntax {
def parseJsonAs[A](implicit d: Decoder[A]): Either[Throwable, A] = def parseJsonAs[A](implicit d: Decoder[A]): Either[Throwable, A] =
for { for {
json <- parse(s).leftMap(_.underlying) json <- parser.decode[A](s)
value <- json.as[A] } yield json
} yield value
} }
} }

View File

@ -31,16 +31,11 @@ import docspell.joex.process.ItemHandler
import docspell.joex.process.ReProcessItem import docspell.joex.process.ReProcessItem
import docspell.joex.scanmailbox._ import docspell.joex.scanmailbox._
import docspell.scheduler._ import docspell.scheduler._
import docspell.scheduler.impl.{ import docspell.scheduler.impl.{JobStoreModuleBuilder, SchedulerModuleBuilder}
PeriodicSchedulerBuilder,
PeriodicTaskStore,
SchedulerBuilder
}
import docspell.joex.updatecheck._ import docspell.joex.updatecheck._
import docspell.notification.api.NotificationModule import docspell.notification.api.NotificationModule
import docspell.notification.impl.NotificationModuleImpl import docspell.notification.impl.NotificationModuleImpl
import docspell.pubsub.api.{PubSub, PubSubT} import docspell.pubsub.api.{PubSub, PubSubT}
import docspell.scheduler.msg.JobQueuePublish
import docspell.scheduler.usertask.{UserTaskScope, UserTaskStore} import docspell.scheduler.usertask.{UserTaskScope, UserTaskStore}
import docspell.store.Store import docspell.store.Store
import docspell.store.records.{REmptyTrashSetting, RJobLog} import docspell.store.records.{REmptyTrashSetting, RJobLog}
@ -50,8 +45,8 @@ import org.http4s.client.Client
final class JoexAppImpl[F[_]: Async]( final class JoexAppImpl[F[_]: Async](
cfg: Config, cfg: Config,
store: Store[F], store: Store[F],
queue: JobQueue[F], uts: UserTaskStore[F],
pstore: PeriodicTaskStore[F], jobStore: JobStore[F],
termSignal: SignallingRef[F, Boolean], termSignal: SignallingRef[F, Boolean],
notificationMod: NotificationModule[F], notificationMod: NotificationModule[F],
val scheduler: Scheduler[F], val scheduler: Scheduler[F],
@ -82,24 +77,23 @@ final class JoexAppImpl[F[_]: Async](
private def scheduleBackgroundTasks: F[Unit] = private def scheduleBackgroundTasks: F[Unit] =
HouseKeepingTask HouseKeepingTask
.periodicTask[F](cfg.houseKeeping.schedule) .periodicTask[F](cfg.houseKeeping.schedule)
.flatMap(pstore.insert) *> .flatMap(t => uts.updateTask(UserTaskScope.system, t.summary, t)) *>
scheduleEmptyTrashTasks *> scheduleEmptyTrashTasks *>
UpdateCheckTask UpdateCheckTask
.periodicTask(cfg.updateCheck) .periodicTask(cfg.updateCheck)
.flatMap(pstore.insert) *> .flatMap(t => uts.updateTask(UserTaskScope.system, t.summary, t)) *>
MigrationTask.job.flatMap(queue.insertIfNew) *> MigrationTask.job.flatMap(jobStore.insertIfNew) *>
AllPreviewsTask AllPreviewsTask
.job(MakePreviewArgs.StoreMode.WhenMissing, None) .job(MakePreviewArgs.StoreMode.WhenMissing, None)
.flatMap(queue.insertIfNew) *> .flatMap(jobStore.insertIfNew) *>
AllPageCountTask.job.flatMap(queue.insertIfNew).as(()) AllPageCountTask.job.flatMap(jobStore.insertIfNew).void
private def scheduleEmptyTrashTasks: F[Unit] = private def scheduleEmptyTrashTasks: F[Unit] =
store store
.transact( .transact(
REmptyTrashSetting.findForAllCollectives(OCollective.EmptyTrash.default, 50) REmptyTrashSetting.findForAllCollectives(OCollective.EmptyTrash.default, 50)
) )
.evalMap(es => .evalMap { es =>
UserTaskStore(store).use { uts =>
val args = EmptyTrashArgs(es.cid, es.minAge) val args = EmptyTrashArgs(es.cid, es.minAge)
uts.updateOneTask( uts.updateOneTask(
UserTaskScope(args.collective), UserTaskScope(args.collective),
@ -107,7 +101,6 @@ final class JoexAppImpl[F[_]: Async](
EmptyTrashTask.userTask(args, es.schedule) EmptyTrashTask.userTask(args, es.schedule)
) )
} }
)
.compile .compile
.drain .drain
@ -123,30 +116,36 @@ object JoexAppImpl extends MailAddressCodec {
pubSub: PubSub[F] pubSub: PubSub[F]
): Resource[F, JoexApp[F]] = ): Resource[F, JoexApp[F]] =
for { for {
pstore <- PeriodicTaskStore.create(store) joexLogger <- Resource.pure(docspell.logging.getLogger[F](s"joex-${cfg.appId.id}"))
joexLogger = docspell.logging.getLogger[F](s"joex-${cfg.appId.id}")
pubSubT = PubSubT(pubSub, joexLogger) pubSubT = PubSubT(pubSub, joexLogger)
javaEmil = javaEmil =
JavaMailEmil(Settings.defaultSettings.copy(debug = cfg.mailDebug)) JavaMailEmil(Settings.defaultSettings.copy(debug = cfg.mailDebug))
notificationMod <- Resource.eval( notificationMod <- Resource.eval(
NotificationModuleImpl[F](store, javaEmil, httpClient, 200) NotificationModuleImpl[F](store, javaEmil, httpClient, 200)
) )
queue <- JobQueuePublish(store, pubSubT, notificationMod)
jobStoreModule = JobStoreModuleBuilder(store)
.withPubsub(pubSubT)
.withEventSink(notificationMod)
.build
joex <- OJoex(pubSubT) joex <- OJoex(pubSubT)
upload <- OUpload(store, queue, joex) upload <- OUpload(store, jobStoreModule.jobs, joex)
fts <- createFtsClient(cfg)(httpClient) fts <- createFtsClient(cfg)(httpClient)
createIndex <- CreateIndex.resource(fts, store) createIndex <- CreateIndex.resource(fts, store)
itemOps <- OItem(store, fts, createIndex, queue, joex) itemOps <- OItem(store, fts, createIndex, jobStoreModule.jobs, joex)
itemSearchOps <- OItemSearch(store) itemSearchOps <- OItemSearch(store)
analyser <- TextAnalyser.create[F](cfg.textAnalysis.textAnalysisConfig) analyser <- TextAnalyser.create[F](cfg.textAnalysis.textAnalysisConfig)
regexNer <- RegexNerFile(cfg.textAnalysis.regexNerFileConfig, store) regexNer <- RegexNerFile(cfg.textAnalysis.regexNerFileConfig, store)
updateCheck <- UpdateCheck.resource(httpClient) updateCheck <- UpdateCheck.resource(httpClient)
notification <- ONotification(store, notificationMod) notification <- ONotification(store, notificationMod)
fileRepo <- OFileRepository(store, queue, joex) fileRepo <- OFileRepository(store, jobStoreModule.jobs, joex)
sch <- SchedulerBuilder(cfg.scheduler, store)
.withQueue(queue) schedulerModule <- SchedulerModuleBuilder(jobStoreModule)
.withPubSub(pubSubT) .withSchedulerConfig(cfg.scheduler)
.withEventSink(notificationMod) .withPeriodicSchedulerConfig(cfg.periodicScheduler)
.withTaskRegistry(JobTaskRegistry
.empty[F]
.withTask( .withTask(
JobTask.json( JobTask.json(
ProcessItemArgs.taskName, ProcessItemArgs.taskName,
@ -199,7 +198,7 @@ object JoexAppImpl extends MailAddressCodec {
.withTask( .withTask(
JobTask.json( JobTask.json(
ConvertAllPdfArgs.taskName, ConvertAllPdfArgs.taskName,
ConvertAllPdfTask[F](queue, joex), ConvertAllPdfTask[F](jobStoreModule.jobs, joex),
ConvertAllPdfTask.onCancel[F] ConvertAllPdfTask.onCancel[F]
) )
) )
@ -220,7 +219,7 @@ object JoexAppImpl extends MailAddressCodec {
.withTask( .withTask(
JobTask.json( JobTask.json(
AllPreviewsArgs.taskName, AllPreviewsArgs.taskName,
AllPreviewsTask[F](queue, joex), AllPreviewsTask[F](jobStoreModule.jobs, joex),
AllPreviewsTask.onCancel[F] AllPreviewsTask.onCancel[F]
) )
) )
@ -234,7 +233,7 @@ object JoexAppImpl extends MailAddressCodec {
.withTask( .withTask(
JobTask.json( JobTask.json(
AllPageCountTask.taskName, AllPageCountTask.taskName,
AllPageCountTask[F](queue, joex), AllPageCountTask[F](jobStoreModule.jobs, joex),
AllPageCountTask.onCancel[F] AllPageCountTask.onCancel[F]
) )
) )
@ -286,23 +285,17 @@ object JoexAppImpl extends MailAddressCodec {
FileIntegrityCheckTask.onCancel[F] FileIntegrityCheckTask.onCancel[F]
) )
) )
.resource
psch <- PeriodicSchedulerBuilder.build(
cfg.periodicScheduler,
sch,
queue,
pstore,
pubSubT
) )
.resource
app = new JoexAppImpl( app = new JoexAppImpl(
cfg, cfg,
store, store,
queue, jobStoreModule.userTasks,
pstore, jobStoreModule.jobs,
termSignal, termSignal,
notificationMod, notificationMod,
sch, schedulerModule.scheduler,
psch schedulerModule.periodicScheduler
) )
appR <- Resource.make(app.init.map(_ => app))(_.initShutdown) appR <- Resource.make(app.init.map(_ => app))(_.initShutdown)
} yield appR } yield appR
@ -312,4 +305,5 @@ object JoexAppImpl extends MailAddressCodec {
)(client: Client[F]): Resource[F, FtsClient[F]] = )(client: Client[F]): Resource[F, FtsClient[F]] =
if (cfg.fullTextSearch.enabled) SolrFtsClient(cfg.fullTextSearch.solr, client) if (cfg.fullTextSearch.enabled) SolrFtsClient(cfg.fullTextSearch.solr, client)
else Resource.pure[F, FtsClient[F]](FtsClient.none[F]) else Resource.pure[F, FtsClient[F]](FtsClient.none[F])
} }

View File

@ -8,13 +8,11 @@ package docspell.joex.fts
import cats.effect._ import cats.effect._
import cats.implicits._ import cats.implicits._
import docspell.backend.fulltext.CreateIndex import docspell.backend.fulltext.CreateIndex
import docspell.common._ import docspell.common._
import docspell.ftsclient._ import docspell.ftsclient._
import docspell.joex.Config import docspell.joex.Config
import docspell.scheduler.Task import docspell.scheduler.{Job, Task}
import docspell.store.records.RJob
object MigrationTask { object MigrationTask {
val taskName = Ident.unsafe("full-text-index") val taskName = Ident.unsafe("full-text-index")
@ -38,21 +36,18 @@ object MigrationTask {
def onCancel[F[_]]: Task[F, Unit, Unit] = def onCancel[F[_]]: Task[F, Unit, Unit] =
Task.log[F, Unit](_.warn("Cancelling full-text-index task")) Task.log[F, Unit](_.warn("Cancelling full-text-index task"))
def job[F[_]: Sync]: F[RJob] = def job[F[_]: Sync]: F[Job[String]] =
for { Job
id <- Ident.randomId[F] .createNew(
now <- Timestamp.current[F]
} yield RJob.newJob(
id,
taskName, taskName,
DocspellSystem.taskGroup, DocspellSystem.taskGroup,
(), (),
"Create full-text index", "Create full-text index",
now,
DocspellSystem.taskGroup, DocspellSystem.taskGroup,
Priority.Low, Priority.Low,
Some(DocspellSystem.migrationTaskTracker) Some(DocspellSystem.migrationTaskTracker)
) )
.map(_.encode)
def migrationTasks[F[_]: Async](fts: FtsClient[F]): F[List[Migration[F]]] = def migrationTasks[F[_]: Async](fts: FtsClient[F]): F[List[Migration[F]]] =
fts.initialize.map(_.map(fm => Migration.from(fm))) fts.initialize.map(_.map(fm => Migration.from(fm)))

View File

@ -13,9 +13,8 @@ import docspell.common._
import docspell.joex.Config import docspell.joex.Config
import docspell.joex.filecopy.FileIntegrityCheckTask import docspell.joex.filecopy.FileIntegrityCheckTask
import docspell.scheduler.{JobTaskResultEncoder, Task} import docspell.scheduler.{JobTaskResultEncoder, Task}
import docspell.store.records._
import com.github.eikek.calev._ import com.github.eikek.calev._
import docspell.scheduler.usertask.{QUserTask, UserTaskScope} import docspell.scheduler.usertask.UserTask
import io.circe.Encoder import io.circe.Encoder
import io.circe.generic.semiauto.deriveEncoder import io.circe.generic.semiauto.deriveEncoder
@ -45,19 +44,15 @@ object HouseKeepingTask {
def onCancel[F[_]]: Task[F, Unit, Unit] = def onCancel[F[_]]: Task[F, Unit, Unit] =
Task.log[F, Unit](_.warn("Cancelling house-keeping task")) Task.log[F, Unit](_.warn("Cancelling house-keeping task"))
def periodicTask[F[_]: Sync](ce: CalEvent): F[RPeriodicTask] = def periodicTask[F[_]: Sync](ce: CalEvent): F[UserTask[Unit]] =
QUserTask UserTask(
.createJson( periodicId,
true,
UserTaskScope(DocspellSystem.taskGroup),
taskName, taskName,
(), true,
"Docspell house-keeping",
Priority.Low,
ce, ce,
None "Docspell house-keeping".some,
) ()
.map(_.copy(id = periodicId)) ).pure[F]
case class Result( case class Result(
checkNodes: CleanupResult, checkNodes: CleanupResult,

View File

@ -12,20 +12,19 @@ import fs2.{Chunk, Stream}
import docspell.backend.JobFactory import docspell.backend.JobFactory
import docspell.backend.ops.OJoex import docspell.backend.ops.OJoex
import docspell.common._ import docspell.common._
import docspell.scheduler.{Context, JobQueue, Task} import docspell.scheduler.{Context, Job, JobStore, Task}
import docspell.store.records.RAttachment import docspell.store.records.RAttachment
import docspell.store.records.RJob
object AllPageCountTask { object AllPageCountTask {
val taskName = Ident.unsafe("all-page-count") val taskName = Ident.unsafe("all-page-count")
type Args = Unit type Args = Unit
def apply[F[_]: Sync](queue: JobQueue[F], joex: OJoex[F]): Task[F, Args, Unit] = def apply[F[_]: Sync](jobStore: JobStore[F], joex: OJoex[F]): Task[F, Args, Unit] =
Task { ctx => Task { ctx =>
for { for {
_ <- ctx.logger.info("Generating previews for attachments") _ <- ctx.logger.info("Generating previews for attachments")
n <- submitConversionJobs(ctx, queue) n <- submitConversionJobs(ctx, jobStore)
_ <- ctx.logger.info(s"Submitted $n jobs") _ <- ctx.logger.info(s"Submitted $n jobs")
_ <- joex.notifyAllNodes _ <- joex.notifyAllNodes
} yield () } yield ()
@ -36,14 +35,14 @@ object AllPageCountTask {
def submitConversionJobs[F[_]: Sync]( def submitConversionJobs[F[_]: Sync](
ctx: Context[F, Args], ctx: Context[F, Args],
queue: JobQueue[F] jobStore: JobStore[F]
): F[Int] = ): F[Int] =
ctx.store ctx.store
.transact(findAttachments) .transact(findAttachments)
.chunks .chunks
.flatMap(createJobs[F]) .flatMap(createJobs[F])
.chunks .chunks
.evalMap(jobs => queue.insertAllIfNew(jobs.toVector).map(_ => jobs.size)) .evalMap(jobs => jobStore.insertAllIfNew(jobs.toVector).map(_ => jobs.size))
.evalTap(n => ctx.logger.debug(s"Submitted $n jobs …")) .evalTap(n => ctx.logger.debug(s"Submitted $n jobs …"))
.compile .compile
.foldMonoid .foldMonoid
@ -51,28 +50,25 @@ object AllPageCountTask {
private def findAttachments[F[_]] = private def findAttachments[F[_]] =
RAttachment.findAllWithoutPageCount(50) RAttachment.findAllWithoutPageCount(50)
private def createJobs[F[_]: Sync](ras: Chunk[RAttachment]): Stream[F, RJob] = { private def createJobs[F[_]: Sync](ras: Chunk[RAttachment]): Stream[F, Job[String]] = {
def mkJob(ra: RAttachment): F[RJob] = def mkJob(ra: RAttachment): F[Job[MakePageCountArgs]] =
JobFactory.makePageCount(MakePageCountArgs(ra.id), None) JobFactory.makePageCount(MakePageCountArgs(ra.id), None)
val jobs = ras.traverse(mkJob) val jobs = ras.traverse(mkJob)
Stream.evalUnChunk(jobs) Stream.evalUnChunk(jobs).map(_.encode)
} }
def job[F[_]: Sync]: F[RJob] = def job[F[_]: Sync]: F[Job[String]] =
for { Job
id <- Ident.randomId[F] .createNew(
now <- Timestamp.current[F]
} yield RJob.newJob(
id,
AllPageCountTask.taskName, AllPageCountTask.taskName,
DocspellSystem.taskGroup, DocspellSystem.taskGroup,
(), (),
"Create all page-counts", "Create all page-counts",
now,
DocspellSystem.taskGroup, DocspellSystem.taskGroup,
Priority.Low, Priority.Low,
Some(DocspellSystem.allPageCountTaskTracker) Some(DocspellSystem.allPageCountTaskTracker)
) )
.map(_.encode)
} }

View File

@ -11,9 +11,8 @@ import cats.implicits._
import fs2.{Chunk, Stream} import fs2.{Chunk, Stream}
import docspell.backend.ops.OJoex import docspell.backend.ops.OJoex
import docspell.common._ import docspell.common._
import docspell.scheduler.{Context, JobQueue, Task} import docspell.scheduler.{Context, Job, JobStore, Task}
import docspell.store.records.RAttachment import docspell.store.records.RAttachment
import docspell.store.records._
/* A task to find all non-converted pdf files (of a collective, or /* A task to find all non-converted pdf files (of a collective, or
* all) and converting them using ocrmypdf by submitting a job for * all) and converting them using ocrmypdf by submitting a job for
@ -22,11 +21,11 @@ import docspell.store.records._
object ConvertAllPdfTask { object ConvertAllPdfTask {
type Args = ConvertAllPdfArgs type Args = ConvertAllPdfArgs
def apply[F[_]: Sync](queue: JobQueue[F], joex: OJoex[F]): Task[F, Args, Unit] = def apply[F[_]: Sync](jobStore: JobStore[F], joex: OJoex[F]): Task[F, Args, Unit] =
Task { ctx => Task { ctx =>
for { for {
_ <- ctx.logger.info("Converting pdfs using ocrmypdf") _ <- ctx.logger.info("Converting pdfs using ocrmypdf")
n <- submitConversionJobs(ctx, queue) n <- submitConversionJobs(ctx, jobStore)
_ <- ctx.logger.info(s"Submitted $n file conversion jobs") _ <- ctx.logger.info(s"Submitted $n file conversion jobs")
_ <- joex.notifyAllNodes _ <- joex.notifyAllNodes
} yield () } yield ()
@ -37,40 +36,35 @@ object ConvertAllPdfTask {
def submitConversionJobs[F[_]: Sync]( def submitConversionJobs[F[_]: Sync](
ctx: Context[F, Args], ctx: Context[F, Args],
queue: JobQueue[F] jobStore: JobStore[F]
): F[Int] = ): F[Int] =
ctx.store ctx.store
.transact(RAttachment.findNonConvertedPdf(ctx.args.collective, 50)) .transact(RAttachment.findNonConvertedPdf(ctx.args.collective, 50))
.chunks .chunks
.flatMap(createJobs[F](ctx)) .flatMap(createJobs[F](ctx))
.chunks .chunks
.evalMap(jobs => queue.insertAllIfNew(jobs.toVector).map(_ => jobs.size)) .evalMap(jobs => jobStore.insertAllIfNew(jobs.toVector).map(_ => jobs.size))
.evalTap(n => ctx.logger.debug(s"Submitted $n jobs …")) .evalTap(n => ctx.logger.debug(s"Submitted $n jobs …"))
.compile .compile
.foldMonoid .foldMonoid
private def createJobs[F[_]: Sync]( private def createJobs[F[_]: Sync](
ctx: Context[F, Args] ctx: Context[F, Args]
)(ras: Chunk[RAttachment]): Stream[F, RJob] = { )(ras: Chunk[RAttachment]): Stream[F, Job[String]] = {
val collectiveOrSystem = ctx.args.collective.getOrElse(DocspellSystem.taskGroup) val collectiveOrSystem = ctx.args.collective.getOrElse(DocspellSystem.taskGroup)
def mkJob(ra: RAttachment): F[RJob] = def mkJob(ra: RAttachment): F[Job[PdfConvTask.Args]] =
for { Job.createNew(
id <- Ident.randomId[F]
now <- Timestamp.current[F]
} yield RJob.newJob(
id,
PdfConvTask.taskName, PdfConvTask.taskName,
collectiveOrSystem, collectiveOrSystem,
PdfConvTask.Args(ra.id), PdfConvTask.Args(ra.id),
s"Convert pdf ${ra.id.id}/${ra.name.getOrElse("-")}", s"Convert pdf ${ra.id.id}/${ra.name.getOrElse("-")}",
now,
collectiveOrSystem, collectiveOrSystem,
Priority.Low, Priority.Low,
Some(PdfConvTask.taskName / ra.id) Some(PdfConvTask.taskName / ra.id)
) )
val jobs = ras.traverse(mkJob) val jobs = ras.traverse(mkJob)
Stream.evalUnChunk(jobs) Stream.evalUnChunk(jobs).map(_.encode)
} }
} }

View File

@ -13,19 +13,18 @@ import docspell.backend.JobFactory
import docspell.backend.ops.OJoex import docspell.backend.ops.OJoex
import docspell.common.MakePreviewArgs.StoreMode import docspell.common.MakePreviewArgs.StoreMode
import docspell.common._ import docspell.common._
import docspell.scheduler.{Context, JobQueue, Task} import docspell.scheduler.{Context, Job, JobStore, Task}
import docspell.store.records.RAttachment import docspell.store.records.RAttachment
import docspell.store.records.RJob
object AllPreviewsTask { object AllPreviewsTask {
type Args = AllPreviewsArgs type Args = AllPreviewsArgs
def apply[F[_]: Sync](queue: JobQueue[F], joex: OJoex[F]): Task[F, Args, Unit] = def apply[F[_]: Sync](jobStore: JobStore[F], joex: OJoex[F]): Task[F, Args, Unit] =
Task { ctx => Task { ctx =>
for { for {
_ <- ctx.logger.info("Generating previews for attachments") _ <- ctx.logger.info("Generating previews for attachments")
n <- submitConversionJobs(ctx, queue) n <- submitConversionJobs(ctx, jobStore)
_ <- ctx.logger.info(s"Submitted $n jobs") _ <- ctx.logger.info(s"Submitted $n jobs")
_ <- joex.notifyAllNodes _ <- joex.notifyAllNodes
} yield () } yield ()
@ -36,14 +35,16 @@ object AllPreviewsTask {
def submitConversionJobs[F[_]: Sync]( def submitConversionJobs[F[_]: Sync](
ctx: Context[F, Args], ctx: Context[F, Args],
queue: JobQueue[F] jobStore: JobStore[F]
): F[Int] = ): F[Int] =
ctx.store ctx.store
.transact(findAttachments(ctx)) .transact(findAttachments(ctx))
.chunks .chunks
.flatMap(createJobs[F](ctx)) .flatMap(createJobs[F](ctx))
.chunks .chunks
.evalMap(jobs => queue.insertAllIfNew(jobs.toVector).map(_ => jobs.size)) .evalMap(jobs =>
jobStore.insertAllIfNew(jobs.map(_.encode).toVector).map(_ => jobs.size)
)
.evalTap(n => ctx.logger.debug(s"Submitted $n jobs …")) .evalTap(n => ctx.logger.debug(s"Submitted $n jobs …"))
.compile .compile
.foldMonoid .foldMonoid
@ -58,13 +59,13 @@ object AllPreviewsTask {
private def createJobs[F[_]: Sync]( private def createJobs[F[_]: Sync](
ctx: Context[F, Args] ctx: Context[F, Args]
)(ras: Chunk[RAttachment]): Stream[F, RJob] = { )(ras: Chunk[RAttachment]): Stream[F, Job[MakePreviewArgs]] = {
val collectiveOrSystem = { val collectiveOrSystem = {
val cid = ctx.args.collective.getOrElse(DocspellSystem.taskGroup) val cid = ctx.args.collective.getOrElse(DocspellSystem.taskGroup)
AccountId(cid, DocspellSystem.user) AccountId(cid, DocspellSystem.user)
} }
def mkJob(ra: RAttachment): F[RJob] = def mkJob(ra: RAttachment): F[Job[MakePreviewArgs]] =
JobFactory.makePreview( JobFactory.makePreview(
MakePreviewArgs(ra.id, ctx.args.storeMode), MakePreviewArgs(ra.id, ctx.args.storeMode),
collectiveOrSystem.some collectiveOrSystem.some
@ -74,7 +75,10 @@ object AllPreviewsTask {
Stream.evalUnChunk(jobs) Stream.evalUnChunk(jobs)
} }
def job[F[_]: Sync](storeMode: MakePreviewArgs.StoreMode, cid: Option[Ident]): F[RJob] = def job[F[_]: Sync](
JobFactory.allPreviews(AllPreviewsArgs(cid, storeMode), None) storeMode: MakePreviewArgs.StoreMode,
cid: Option[Ident]
): F[Job[String]] =
JobFactory.allPreviews(AllPreviewsArgs(cid, storeMode), None).map(_.encode)
} }

View File

@ -65,7 +65,7 @@ object ItemHandler {
.map(_ => data) .map(_ => data)
) )
def isLastRetry[F[_]: Sync]: Task[F, Args, Boolean] = def isLastRetry[F[_]]: Task[F, Args, Boolean] =
Task(_.isLastRetry) Task(_.isLastRetry)
def safeProcess[F[_]: Async]( def safeProcess[F[_]: Async](

View File

@ -141,7 +141,7 @@ object ReProcessItem {
lang1.orElse(lang2).getOrElse(Language.German) lang1.orElse(lang2).getOrElse(Language.German)
} }
def isLastRetry[F[_]: Sync]: Task[F, Args, Boolean] = def isLastRetry[F[_]]: Task[F, Args, Boolean] =
Task(_.isLastRetry) Task(_.isLastRetry)
def safeProcess[F[_]: Async]( def safeProcess[F[_]: Async](

View File

@ -12,8 +12,7 @@ import cats.implicits._
import docspell.common._ import docspell.common._
import docspell.scheduler.Context import docspell.scheduler.Context
import docspell.scheduler.Task import docspell.scheduler.Task
import docspell.scheduler.usertask.{UserTask, UserTaskScope} import docspell.scheduler.usertask.UserTask
import docspell.store.records.RPeriodicTask
import docspell.store.records.RUserEmail import docspell.store.records.RUserEmail
import emil._ import emil._
@ -25,18 +24,15 @@ object UpdateCheckTask {
def onCancel[F[_]]: Task[F, Args, Unit] = def onCancel[F[_]]: Task[F, Args, Unit] =
Task.log(_.warn("Cancelling update-check task")) Task.log(_.warn("Cancelling update-check task"))
def periodicTask[F[_]: Sync](cfg: UpdateCheckConfig): F[RPeriodicTask] = def periodicTask[F[_]: Sync](cfg: UpdateCheckConfig): F[UserTask[Unit]] =
UserTask( UserTask(
Ident.unsafe("docspell-update-check"), Ident.unsafe("docspell-update-check"),
taskName, taskName,
cfg.enabled, cfg.enabled,
cfg.schedule, cfg.schedule,
None, "Docspell Update Check".some,
() ()
).encode.toPeriodicTask( ).pure[F]
UserTaskScope(cfg.senderAccount.collective),
"Docspell Update Check".some
)
def apply[F[_]: Async]( def apply[F[_]: Async](
cfg: UpdateCheckConfig, cfg: UpdateCheckConfig,

View File

@ -9,7 +9,6 @@ package docspell.restserver
import cats.effect._ import cats.effect._
import fs2.Stream import fs2.Stream
import fs2.concurrent.Topic import fs2.concurrent.Topic
import docspell.backend.BackendApp import docspell.backend.BackendApp
import docspell.backend.auth.{AuthToken, ShareToken} import docspell.backend.auth.{AuthToken, ShareToken}
import docspell.ftsclient.FtsClient import docspell.ftsclient.FtsClient
@ -23,8 +22,8 @@ import docspell.restserver.http4s.EnvMiddleware
import docspell.restserver.routes._ import docspell.restserver.routes._
import docspell.restserver.webapp.{TemplateRoutes, Templates, WebjarRoutes} import docspell.restserver.webapp.{TemplateRoutes, Templates, WebjarRoutes}
import docspell.restserver.ws.{OutputEvent, WebSocketRoutes} import docspell.restserver.ws.{OutputEvent, WebSocketRoutes}
import docspell.scheduler.impl.JobStoreModuleBuilder
import docspell.store.Store import docspell.store.Store
import emil.javamail.JavaMailEmil import emil.javamail.JavaMailEmil
import org.http4s.HttpRoutes import org.http4s.HttpRoutes
import org.http4s.client.Client import org.http4s.client.Client
@ -167,8 +166,12 @@ object RestAppImpl {
notificationMod <- Resource.eval( notificationMod <- Resource.eval(
NotificationModuleImpl[F](store, javaEmil, httpClient, 200) NotificationModuleImpl[F](store, javaEmil, httpClient, 200)
) )
schedulerMod = JobStoreModuleBuilder(store)
.withPubsub(pubSubT)
.withEventSink(notificationMod)
.build
backend <- BackendApp backend <- BackendApp
.create[F](store, javaEmil, ftsClient, pubSubT, notificationMod) .create[F](store, javaEmil, ftsClient, pubSubT, schedulerMod, notificationMod)
app = new RestAppImpl[F]( app = new RestAppImpl[F](
cfg, cfg,

View File

@ -6,14 +6,9 @@
package docspell.scheduler package docspell.scheduler
import cats.effect._
import cats.implicits._
import cats.{Applicative, Functor}
import docspell.common._ import docspell.common._
import docspell.logging.Logger import docspell.logging.Logger
import docspell.store.Store import docspell.store.Store
import docspell.store.records.RJob
trait Context[F[_], A] { self => trait Context[F[_], A] { self =>
@ -29,54 +24,8 @@ trait Context[F[_], A] { self =>
def store: Store[F] def store: Store[F]
final def isLastRetry(implicit ev: Applicative[F]): F[Boolean] = def isLastRetry: F[Boolean]
for {
current <- store.transact(RJob.getRetries(jobId)) def map[C](f: A => C): Context[F, C]
last = config.retries == current.getOrElse(0)
} yield last
def map[C](f: A => C)(implicit F: Functor[F]): Context[F, C] =
new Context.ContextImpl[F, C](f(args), logger, store, config, jobId)
}
object Context {
def create[F[_]: Async, A](
jobId: Ident,
arg: A,
config: SchedulerConfig,
log: Logger[F],
store: Store[F]
): Context[F, A] =
new ContextImpl(arg, log, store, config, jobId)
def apply[F[_]: Async, A](
job: RJob,
arg: A,
config: SchedulerConfig,
logSink: LogSink[F],
store: Store[F]
): F[Context[F, A]] = {
val log = docspell.logging.getLogger[F]
for {
_ <- log.trace("Creating logger for task run")
logger <- QueueLogger(job.id, job.info, config.logBufferSize, logSink)
_ <- log.trace("Logger created, instantiating context")
ctx = create[F, A](job.id, arg, config, logger, store)
} yield ctx
}
final private class ContextImpl[F[_]: Functor, A](
val args: A,
val logger: Logger[F],
val store: Store[F],
val config: SchedulerConfig,
val jobId: Ident
) extends Context[F, A] {
def setProgress(percent: Int): F[Unit] = {
val pval = math.min(100, math.max(0, percent))
store.transact(RJob.setProgress(jobId, pval)).map(_ => ())
}
}
} }

View File

@ -0,0 +1,36 @@
package docspell.scheduler
import cats.effect.Sync
import cats.syntax.functor._
import docspell.common._
import io.circe.Encoder
final case class Job[A](
id: Ident,
task: Ident,
group: Ident,
args: A,
subject: String,
submitter: Ident,
priority: Priority,
tracker: Option[Ident]
) {
def encode(implicit E: Encoder[A]): Job[String] =
Job(id, task, group, E.apply(args).noSpaces, subject, submitter, priority, tracker)
}
object Job {
def createNew[F[_]: Sync, A](
task: Ident,
group: Ident,
args: A,
subject: String,
submitter: Ident,
priority: Priority,
tracker: Option[Ident]
): F[Job[A]] =
Ident.randomId[F].map { id =>
Job(id, task, group, args, subject, submitter, priority, tracker)
}
}

View File

@ -1,97 +0,0 @@
/*
* Copyright 2020 Eike K. & Contributors
*
* SPDX-License-Identifier: AGPL-3.0-or-later
*/
package docspell.scheduler
import cats.effect._
import cats.implicits._
import docspell.common._
import docspell.store.Store
import docspell.store.queries.QJob
import docspell.store.records.RJob
trait JobQueue[F[_]] {
/** Inserts the job into the queue to get picked up as soon as possible. The job must
* have a new unique id.
*/
def insert(job: RJob): F[Unit]
/** Inserts the job into the queue only, if there is no job with the same tracker-id
* running at the moment. The job id must be a new unique id.
*
* If the job has no tracker defined, it is simply inserted.
*/
def insertIfNew(job: RJob): F[Boolean]
def insertAll(jobs: Seq[RJob]): F[List[Boolean]]
def insertAllIfNew(jobs: Seq[RJob]): F[List[Boolean]]
def nextJob(
prio: Ident => F[Priority],
worker: Ident,
retryPause: Duration
): F[Option[RJob]]
}
object JobQueue {
private[scheduler] def create[F[_]: Async](store: Store[F]): Resource[F, JobQueue[F]] =
Resource.pure[F, JobQueue[F]](new JobQueue[F] {
private[this] val logger = docspell.logging.getLogger[F]
def nextJob(
prio: Ident => F[Priority],
worker: Ident,
retryPause: Duration
): F[Option[RJob]] =
logger
.trace("Select next job") *> QJob.takeNextJob(store)(prio, worker, retryPause)
def insert(job: RJob): F[Unit] =
store
.transact(RJob.insert(job))
.flatMap { n =>
if (n != 1)
Async[F]
.raiseError(new Exception(s"Inserting job failed. Update count: $n"))
else ().pure[F]
}
def insertIfNew(job: RJob): F[Boolean] =
for {
rj <- job.tracker match {
case Some(tid) =>
store.transact(RJob.findNonFinalByTracker(tid))
case None =>
None.pure[F]
}
ret <-
if (rj.isDefined) false.pure[F]
else insert(job).as(true)
} yield ret
def insertAll(jobs: Seq[RJob]): F[List[Boolean]] =
jobs.toList
.traverse(j => insert(j).attempt)
.flatMap(_.traverse {
case Right(()) => true.pure[F]
case Left(ex) =>
logger.error(ex)("Could not insert job. Skipping it.").as(false)
})
def insertAllIfNew(jobs: Seq[RJob]): F[List[Boolean]] =
jobs.toList
.traverse(j => insertIfNew(j).attempt)
.flatMap(_.traverse {
case Right(true) => true.pure[F]
case Right(false) => false.pure[F]
case Left(ex) =>
logger.error(ex)("Could not insert job. Skipping it.").as(false)
})
})
}

View File

@ -0,0 +1,21 @@
package docspell.scheduler
trait JobStore[F[_]] {
/** Inserts the job into the queue to get picked up as soon as possible. The job must
* have a new unique id.
*/
def insert(job: Job[String]): F[Unit]
/** Inserts the job into the queue only, if there is no job with the same tracker-id
* running at the moment. The job id must be a new unique id.
*
* If the job has no tracker defined, it is simply inserted.
*/
def insertIfNew(job: Job[String]): F[Boolean]
def insertAll(jobs: Seq[Job[String]]): F[List[Boolean]]
def insertAllIfNew(jobs: Seq[Job[String]]): F[List[Boolean]]
}

View File

@ -0,0 +1,9 @@
package docspell.scheduler
import docspell.scheduler.usertask.UserTaskStore
trait JobStoreModule[F[_]] {
def userTasks: UserTaskStore[F]
def jobs: JobStore[F]
}

View File

@ -43,7 +43,7 @@ object JobTask {
str.parseJsonAs[A] match { str.parseJsonAs[A] match {
case Right(a) => a.pure[F] case Right(a) => a.pure[F]
case Left(ex) => case Left(ex) =>
Sync[F].raiseError(new Exception(s"Cannot parse task arguments: $str", ex)) Sync[F].raiseError(new Exception(s"Cannot parse task arguments: '$str'", ex))
} }
JobTask(name, task.contramap(convert).map(E.encode), onCancel.contramap(convert)) JobTask(name, task.contramap(convert).map(E.encode), onCancel.contramap(convert))

View File

@ -12,3 +12,8 @@ case class PeriodicSchedulerConfig(
name: Ident, name: Ident,
wakeupPeriod: Duration wakeupPeriod: Duration
) )
object PeriodicSchedulerConfig {
def default(id: Ident): PeriodicSchedulerConfig =
PeriodicSchedulerConfig(id, Duration.minutes(10))
}

View File

@ -20,11 +20,10 @@ case class SchedulerConfig(
object SchedulerConfig { object SchedulerConfig {
val default = SchedulerConfig( def default(id: Ident) = SchedulerConfig(
name = Ident.unsafe("default-scheduler"), name = id,
poolSize = 2 // math.max(2, Runtime.getRuntime.availableProcessors / 2) poolSize = 1,
, countingScheme = CountingScheme(3, 1),
countingScheme = CountingScheme(2, 1),
retries = 5, retries = 5,
retryDelay = Duration.seconds(30), retryDelay = Duration.seconds(30),
logBufferSize = 500, logBufferSize = 500,

View File

@ -0,0 +1,6 @@
package docspell.scheduler
trait SchedulerModule[F[_]] {
def scheduler: Scheduler[F]
def periodicScheduler: PeriodicScheduler[F]
}

View File

@ -6,15 +6,11 @@
package docspell.scheduler.usertask package docspell.scheduler.usertask
import cats.effect._
import cats.implicits._
import com.github.eikek.calev.CalEvent import com.github.eikek.calev.CalEvent
import docspell.common._ import docspell.common._
import docspell.common.syntax.all._ import io.circe.Encoder
import docspell.store.records.RPeriodicTask
import io.circe.{Decoder, Encoder}
case class UserTask[A]( final case class UserTask[A](
id: Ident, id: Ident,
name: Ident, name: Ident,
enabled: Boolean, enabled: Boolean,
@ -32,33 +28,3 @@ case class UserTask[A](
def mapArgs[B](f: A => B): UserTask[B] = def mapArgs[B](f: A => B): UserTask[B] =
withArgs(f(args)) withArgs(f(args))
} }
object UserTask {
implicit final class UserTaskCodec(ut: UserTask[String]) {
def decode[A](implicit D: Decoder[A]): Either[String, UserTask[A]] =
ut.args
.parseJsonAs[A]
.left
.map(_.getMessage)
.map(a => ut.copy(args = a))
def toPeriodicTask[F[_]: Sync](
scope: UserTaskScope,
subject: Option[String]
): F[RPeriodicTask] =
QUserTask
.create[F](
ut.enabled,
scope,
ut.name,
ut.args,
subject.getOrElse(s"${scope.fold(_.user.id, _.id)}: ${ut.name.id}"),
Priority.Low,
ut.timer,
ut.summary
)
.map(r => r.copy(id = ut.id))
}
}

View File

@ -20,7 +20,7 @@ sealed trait UserTaskScope { self: Product =>
/** Maps to the account or uses the collective for both parts if the scope is collective /** Maps to the account or uses the collective for both parts if the scope is collective
* wide. * wide.
*/ */
private[usertask] def toAccountId: AccountId = private[scheduler] def toAccountId: AccountId =
AccountId(collective, fold(_.user, identity)) AccountId(collective, fold(_.user, identity))
} }
@ -49,4 +49,7 @@ object UserTaskScope {
def apply(collective: Ident): UserTaskScope = def apply(collective: Ident): UserTaskScope =
UserTaskScope.collective(collective) UserTaskScope.collective(collective)
def system: UserTaskScope =
collective(DocspellSystem.taskGroup)
} }

View File

@ -7,10 +7,7 @@
package docspell.scheduler.usertask package docspell.scheduler.usertask
import cats.data.OptionT import cats.data.OptionT
import cats.effect._
import cats.implicits._
import docspell.common._ import docspell.common._
import docspell.store.{AddResult, Store}
import fs2.Stream import fs2.Stream
import io.circe._ import io.circe._
@ -88,96 +85,11 @@ trait UserTaskStore[F[_]] {
/** Delete all tasks of the given user that have name `name`. */ /** Delete all tasks of the given user that have name `name`. */
def deleteAll(scope: UserTaskScope, name: Ident): F[Int] def deleteAll(scope: UserTaskScope, name: Ident): F[Int]
}
object UserTaskStore { /** Discards the schedule and immediately submits the task to the job executor's queue.
* It will not update the corresponding periodic task.
def apply[F[_]: Async](store: Store[F]): Resource[F, UserTaskStore[F]] = */
Resource.pure[F, UserTaskStore[F]](new UserTaskStore[F] { def executeNow[A](scope: UserTaskScope, subject: Option[String], task: UserTask[A])(
def getAll(scope: UserTaskScope): Stream[F, UserTask[String]] =
store.transact(QUserTask.findAll(scope.toAccountId))
def getByNameRaw(scope: UserTaskScope, name: Ident): Stream[F, UserTask[String]] =
store.transact(QUserTask.findByName(scope.toAccountId, name))
def getByIdRaw(scope: UserTaskScope, id: Ident): OptionT[F, UserTask[String]] =
OptionT(store.transact(QUserTask.findById(scope.toAccountId, id)))
def getByName[A](scope: UserTaskScope, name: Ident)(implicit
D: Decoder[A]
): Stream[F, UserTask[A]] =
getByNameRaw(scope, name).flatMap(_.decode match {
case Right(ua) => Stream.emit(ua)
case Left(err) => Stream.raiseError[F](new Exception(err))
})
def updateTask[A](scope: UserTaskScope, subject: Option[String], ut: UserTask[A])(
implicit E: Encoder[A] implicit E: Encoder[A]
): F[Int] = { ): F[Unit]
val exists = QUserTask.exists(ut.id)
val insert = QUserTask.insert(scope, subject, ut.encode)
store.add(insert, exists).flatMap {
case AddResult.Success =>
1.pure[F]
case AddResult.EntityExists(_) =>
store.transact(QUserTask.update(scope, subject, ut.encode))
case AddResult.Failure(ex) =>
Async[F].raiseError(ex)
}
}
def deleteTask(scope: UserTaskScope, id: Ident): F[Int] =
store.transact(QUserTask.delete(scope.toAccountId, id))
def getOneByNameRaw(
scope: UserTaskScope,
name: Ident
): OptionT[F, UserTask[String]] =
OptionT(
getByNameRaw(scope, name)
.take(2)
.compile
.toList
.flatMap {
case Nil => (None: Option[UserTask[String]]).pure[F]
case ut :: Nil => ut.some.pure[F]
case _ => Async[F].raiseError(new Exception("More than one result found"))
}
)
def getOneByName[A](scope: UserTaskScope, name: Ident)(implicit
D: Decoder[A]
): OptionT[F, UserTask[A]] =
getOneByNameRaw(scope, name)
.semiflatMap(_.decode match {
case Right(ua) => ua.pure[F]
case Left(err) => Async[F].raiseError(new Exception(err))
})
def updateOneTask[A](
scope: UserTaskScope,
subject: Option[String],
ut: UserTask[A]
)(implicit
E: Encoder[A]
): F[UserTask[String]] =
getByNameRaw(scope, ut.name).compile.toList.flatMap {
case a :: rest =>
val task = ut.copy(id = a.id).encode
for {
_ <- store.transact(QUserTask.update(scope, subject, task))
_ <- store.transact(
rest.traverse(t => QUserTask.delete(scope.toAccountId, t.id))
)
} yield task
case Nil =>
val task = ut.encode
store.transact(QUserTask.insert(scope, subject, task)).map(_ => task)
}
def deleteAll(scope: UserTaskScope, name: Ident): F[Int] =
store.transact(QUserTask.deleteAll(scope.toAccountId, name))
})
} }

View File

@ -4,10 +4,9 @@
* SPDX-License-Identifier: AGPL-3.0-or-later * SPDX-License-Identifier: AGPL-3.0-or-later
*/ */
package docspell.joex.scheduler package docspell.scheduler
import docspell.common.Priority import docspell.common.Priority
import munit._ import munit._
class CountingSchemeSpec extends FunSuite { class CountingSchemeSpec extends FunSuite {

View File

@ -0,0 +1,61 @@
package docspell.scheduler.impl
import cats._
import cats.syntax.all._
import cats.effect._
import docspell.common._
import docspell.logging.Logger
import docspell.scheduler._
import docspell.store.Store
import docspell.store.records.RJob
/** Default [[Context]] implementation handed to running job tasks.
  *
  * Carries the decoded task arguments together with the job-scoped logger and
  * the database store used to report progress and retry state.
  */
class ContextImpl[F[_]: Functor, A](
    val args: A,
    val logger: Logger[F],
    val store: Store[F],
    val config: SchedulerConfig,
    val jobId: Ident
) extends Context[F, A] {

  /** Persists the job's progress, clamping the given value into 0..100. */
  def setProgress(percent: Int): F[Unit] = {
    val clamped = percent.max(0).min(100)
    store.transact(RJob.setProgress(jobId, clamped)).void
  }

  /** True when the job's retry counter has reached the configured maximum. */
  def isLastRetry: F[Boolean] =
    store
      .transact(RJob.getRetries(jobId))
      .map(retries => config.retries == retries.getOrElse(0))

  /** A context with the arguments transformed by `f`; everything else is shared. */
  def map[C](f: A => C) =
    new ContextImpl[F, C](f(args), logger, store, config, jobId)
}
object ContextImpl {

  /** Builds a context directly from its parts with an already-created logger. */
  def create[F[_]: Async, A](
      jobId: Ident,
      arg: A,
      config: SchedulerConfig,
      log: Logger[F],
      store: Store[F]
  ): Context[F, A] =
    new ContextImpl(arg, log, store, config, jobId)

  /** Builds a context for `job`, first wiring up a buffered queue logger that
    * forwards log events to the given sink.
    */
  def apply[F[_]: Async, A](
      job: RJob,
      arg: A,
      config: SchedulerConfig,
      logSink: LogSink[F],
      store: Store[F]
  ): F[Context[F, A]] = {
    val log = docspell.logging.getLogger[F]
    log.trace("Creating logger for task run") *>
      QueueLogger(job.id, job.info, config.logBufferSize, logSink).flatMap { logger =>
        log
          .trace("Logger created, instantiating context")
          .map(_ => create[F, A](job.id, arg, config, logger, store))
      }
  }
}

View File

@ -0,0 +1,37 @@
/*
* Copyright 2020 Eike K. & Contributors
*
* SPDX-License-Identifier: AGPL-3.0-or-later
*/
package docspell.scheduler.impl
import cats.effect._
import cats.implicits._
import docspell.common._
import docspell.store.Store
import docspell.store.queries.QJob
import docspell.store.records.RJob
/** Read side of the job queue: hands out the next due job to a worker node. */
trait JobQueue[F[_]] {

  /** Takes the next job from the queue, if any is currently due.
    *
    * @param prio resolves the priority-counting scheme to use for a job group
    * @param worker identifier of the node claiming the job
    * @param retryPause minimum time to wait before a failed job is retried
    * @return the claimed job, or `None` when nothing is due
    */
  def nextJob(
      prio: Ident => F[Priority],
      worker: Ident,
      retryPause: Duration
  ): F[Option[RJob]]
}
object JobQueue {

  /** A queue backed by the database; selection is delegated to `QJob.takeNextJob`. */
  private[scheduler] def apply[F[_]: Async](store: Store[F]): JobQueue[F] =
    new JobQueue[F] {
      private[this] val logger = docspell.logging.getLogger[F]

      def nextJob(
          prio: Ident => F[Priority],
          worker: Ident,
          retryPause: Duration
      ): F[Option[RJob]] =
        logger
          .trace("Select next job")
          .flatMap(_ => QJob.takeNextJob(store)(prio, worker, retryPause))
    }
}

View File

@ -0,0 +1,87 @@
package docspell.scheduler.impl
import cats.effect.Sync
import cats.syntax.all._
import docspell.common.Timestamp
import docspell.scheduler._
import docspell.store.Store
import docspell.store.records.RJob
/** Database-backed [[JobStore]].
  *
  * Single inserts fail loudly (raised error) when the database reports an
  * unexpected update count; batch inserts are best-effort — a failing job is
  * logged and reported as `false` while the remaining jobs are still tried.
  */
final class JobStoreImpl[F[_]: Sync](store: Store[F]) extends JobStore[F] {
  private[this] val logger = docspell.logging.getLogger[F]

  /** Inserts the job using the current time as its submission timestamp. */
  def insert(job: Job[String]): F[Unit] =
    Timestamp.current[F].flatMap(now => insert0(job, now))

  /** Inserts the job with an explicit submission timestamp; raises when the
    * database reports an update count other than 1.
    */
  def insert0(job: Job[String], submitted: Timestamp): F[Unit] =
    store
      .transact(RJob.insert(toRecord(job, submitted)))
      .flatMap { n =>
        if (n != 1)
          Sync[F]
            .raiseError(new Exception(s"Inserting job failed. Update count: $n"))
        else ().pure[F]
      }

  /** Inserts the job unless a non-final job with the same tracker id exists. */
  def insertIfNew(job: Job[String]): F[Boolean] =
    Timestamp.current[F].flatMap(now => insertIfNew0(job, now))

  /** Like [[insertIfNew]] with an explicit submission timestamp.
    * @return true when the job was inserted, false when a tracked duplicate exists
    */
  def insertIfNew0(job: Job[String], submitted: Timestamp): F[Boolean] =
    for {
      rj <- job.tracker match {
        case Some(tid) =>
          store.transact(RJob.findNonFinalByTracker(tid))
        case None =>
          None.pure[F]
      }
      ret <-
        if (rj.isDefined) false.pure[F]
        else insert0(job, submitted).as(true)
    } yield ret

  def insertAll(jobs: Seq[Job[String]]): F[List[Boolean]] =
    submitAll(jobs)((job, now) => insert0(job, now).as(true))

  def insertAllIfNew(jobs: Seq[Job[String]]): F[List[Boolean]] =
    submitAll(jobs)(insertIfNew0)

  /** Runs `submit` for every job with one shared submission timestamp.
    * Errors are logged and turned into `false` so one bad job does not abort
    * the whole batch.
    */
  private def submitAll(
      jobs: Seq[Job[String]]
  )(submit: (Job[String], Timestamp) => F[Boolean]): F[List[Boolean]] =
    Timestamp.current[F].flatMap { now =>
      jobs.toList.traverse { job =>
        submit(job, now).handleErrorWith { ex =>
          logger.error(ex)("Could not insert job. Skipping it.").as(false)
        }
      }
    }

  /** Translates the queue-agnostic job into the database record. */
  def toRecord(job: Job[String], timestamp: Timestamp): RJob =
    RJob.newJob(
      job.id,
      job.task,
      job.group,
      job.args,
      job.subject,
      timestamp,
      job.submitter,
      job.priority,
      job.tracker
    )
}
object JobStoreImpl {

  /** Creates a `JobStore` persisting jobs through the given database store. */
  def apply[F[_]: Sync](store: Store[F]): JobStore[F] =
    new JobStoreImpl[F](store)
}

View File

@ -0,0 +1,50 @@
package docspell.scheduler.impl
import cats.effect.Async
import docspell.notification.api.EventSink
import docspell.pubsub.api.PubSubT
import docspell.scheduler._
import docspell.scheduler.usertask.UserTaskStore
import docspell.store.Store
/** Builder collecting the collaborators needed to assemble a job-store module. */
case class JobStoreModuleBuilder[F[_]: Async](
    store: Store[F],
    pubsub: PubSubT[F],
    eventSink: EventSink[F]
) {
  /** Replaces the pubsub used to announce submitted jobs. */
  def withPubsub(ps: PubSubT[F]): JobStoreModuleBuilder[F] =
    copy(pubsub = ps)

  /** Replaces the sink receiving job-submitted events. */
  def withEventSink(es: EventSink[F]): JobStoreModuleBuilder[F] =
    copy(eventSink = es)

  /** Assembles the module: a publishing job store layered over the db-backed
    * one, plus the periodic- and user-task stores built on top of it.
    */
  def build: JobStoreModuleBuilder.Module[F] = {
    val publishingJobStore = JobStorePublish(store, pubsub, eventSink)
    val periodicTasks = PeriodicTaskStore(store, publishingJobStore)
    new JobStoreModuleBuilder.Module(
      UserTaskStoreImpl(store, periodicTasks),
      periodicTasks,
      publishingJobStore,
      store,
      eventSink,
      pubsub
    )
  }
}
object JobStoreModuleBuilder {

  /** Starts a builder with a no-op pubsub and a silent event sink. */
  def apply[F[_]: Async](store: Store[F]): JobStoreModuleBuilder[F] =
    JobStoreModuleBuilder(store, PubSubT.noop[F], EventSink.silent[F])

  /** Concrete `JobStoreModule` bundling the stores produced by the builder. */
  final class Module[F[_]](
      val userTasks: UserTaskStore[F],
      val periodicTaskStore: PeriodicTaskStore[F],
      val jobs: JobStore[F],
      val store: Store[F],
      val eventSink: EventSink[F],
      val pubSubT: PubSubT[F]
  ) extends JobStoreModule[F] {}
}

View File

@ -4,51 +4,52 @@
* SPDX-License-Identifier: AGPL-3.0-or-later * SPDX-License-Identifier: AGPL-3.0-or-later
*/ */
package docspell.scheduler.msg package docspell.scheduler.impl
import cats.effect._ import cats.effect._
import cats.implicits._ import cats.implicits._
import docspell.common.{Duration, Ident, Priority}
import docspell.common.JobState
import docspell.notification.api.{Event, EventSink} import docspell.notification.api.{Event, EventSink}
import docspell.pubsub.api.PubSubT import docspell.pubsub.api.PubSubT
import docspell.scheduler.JobQueue import docspell.scheduler._
import docspell.scheduler.msg.JobSubmitted
import docspell.store.Store import docspell.store.Store
import docspell.store.records.RJob
final class JobQueuePublish[F[_]: Sync]( final class JobStorePublish[F[_]: Sync](
delegate: JobQueue[F], delegate: JobStore[F],
pubsub: PubSubT[F], pubsub: PubSubT[F],
eventSink: EventSink[F] eventSink: EventSink[F]
) extends JobQueue[F] { ) extends JobStore[F] {
private def msg(job: RJob): JobSubmitted = private def msg(job: Job[String]): JobSubmitted =
JobSubmitted(job.id, job.group, job.task, job.args) JobSubmitted(job.id, job.group, job.task, job.args)
private def event(job: RJob): Event.JobSubmitted = private def event(job: Job[String]): Event.JobSubmitted =
Event.JobSubmitted( Event.JobSubmitted(
job.id, job.id,
job.group, job.group,
job.task, job.task,
job.args, job.args,
job.state, JobState.waiting,
job.subject, job.subject,
job.submitter job.submitter
) )
private def publish(job: RJob): F[Unit] = private def publish(job: Job[String]): F[Unit] =
pubsub.publish1(JobSubmitted.topic, msg(job)).as(()) *> pubsub.publish1(JobSubmitted.topic, msg(job)).as(()) *>
eventSink.offer(event(job)) eventSink.offer(event(job))
def insert(job: RJob) = def insert(job: Job[String]) =
delegate.insert(job).flatTap(_ => publish(job)) delegate.insert(job).flatTap(_ => publish(job))
def insertIfNew(job: RJob) = def insertIfNew(job: Job[String]) =
delegate.insertIfNew(job).flatTap { delegate.insertIfNew(job).flatTap {
case true => publish(job) case true => publish(job)
case false => ().pure[F] case false => ().pure[F]
} }
def insertAll(jobs: Seq[RJob]) = def insertAll(jobs: Seq[Job[String]]) =
delegate.insertAll(jobs).flatTap { results => delegate.insertAll(jobs).flatTap { results =>
results.zip(jobs).traverse { case (res, job) => results.zip(jobs).traverse { case (res, job) =>
if (res) publish(job) if (res) publish(job)
@ -56,23 +57,20 @@ final class JobQueuePublish[F[_]: Sync](
} }
} }
def insertAllIfNew(jobs: Seq[RJob]) = def insertAllIfNew(jobs: Seq[Job[String]]) =
delegate.insertAllIfNew(jobs).flatTap { results => delegate.insertAllIfNew(jobs).flatTap { results =>
results.zip(jobs).traverse { case (res, job) => results.zip(jobs).traverse { case (res, job) =>
if (res) publish(job) if (res) publish(job)
else ().pure[F] else ().pure[F]
} }
} }
def nextJob(prio: Ident => F[Priority], worker: Ident, retryPause: Duration) =
delegate.nextJob(prio, worker, retryPause)
} }
object JobQueuePublish { object JobStorePublish {
def apply[F[_]: Async]( def apply[F[_]: Async](
store: Store[F], store: Store[F],
pubSub: PubSubT[F], pubSub: PubSubT[F],
eventSink: EventSink[F] eventSink: EventSink[F]
): Resource[F, JobQueue[F]] = ): JobStore[F] =
JobQueue.create(store).map(q => new JobQueuePublish[F](q, pubSub, eventSink)) new JobStorePublish[F](JobStoreImpl(store), pubSub, eventSink)
} }

View File

@ -4,16 +4,16 @@
* SPDX-License-Identifier: AGPL-3.0-or-later * SPDX-License-Identifier: AGPL-3.0-or-later
*/ */
package docspell.scheduler package docspell.scheduler.impl
import cats.effect._ import cats.effect._
import cats.implicits._ import cats.implicits._
import fs2.Pipe
import docspell.common._ import docspell.common._
import docspell.logging import docspell.logging
import docspell.scheduler.LogEvent
import docspell.store.Store import docspell.store.Store
import docspell.store.records.RJobLog import docspell.store.records.RJobLog
import fs2.Pipe
trait LogSink[F[_]] { trait LogSink[F[_]] {

View File

@ -7,10 +7,8 @@ import fs2.concurrent.SignallingRef
object PeriodicSchedulerBuilder { object PeriodicSchedulerBuilder {
def build[F[_]: Async]( def resource[F[_]: Async](
cfg: PeriodicSchedulerConfig, cfg: PeriodicSchedulerConfig,
sch: Scheduler[F],
queue: JobQueue[F],
store: PeriodicTaskStore[F], store: PeriodicTaskStore[F],
pubsub: PubSubT[F] pubsub: PubSubT[F]
): Resource[F, PeriodicScheduler[F]] = ): Resource[F, PeriodicScheduler[F]] =
@ -19,8 +17,6 @@ object PeriodicSchedulerBuilder {
state <- Resource.eval(SignallingRef(PeriodicSchedulerImpl.emptyState[F])) state <- Resource.eval(SignallingRef(PeriodicSchedulerImpl.emptyState[F]))
psch = new PeriodicSchedulerImpl[F]( psch = new PeriodicSchedulerImpl[F](
cfg, cfg,
sch,
queue,
store, store,
pubsub, pubsub,
waiter, waiter,

View File

@ -20,8 +20,6 @@ import eu.timepit.fs2cron.calev.CalevScheduler
final class PeriodicSchedulerImpl[F[_]: Async]( final class PeriodicSchedulerImpl[F[_]: Async](
val config: PeriodicSchedulerConfig, val config: PeriodicSchedulerConfig,
sch: Scheduler[F],
queue: JobQueue[F],
store: PeriodicTaskStore[F], store: PeriodicTaskStore[F],
pubSub: PubSubT[F], pubSub: PubSubT[F],
waiter: SignallingRef[F, Boolean], waiter: SignallingRef[F, Boolean],
@ -119,11 +117,11 @@ final class PeriodicSchedulerImpl[F[_]: Async](
case None => case None =>
logger.info(s"Submitting job for periodic task '${pj.task.id}'") *> logger.info(s"Submitting job for periodic task '${pj.task.id}'") *>
pj.toJob.flatMap(queue.insert) *> notifyJoex *> true.pure[F] store.submit(pj) *> notifyJoex *> true.pure[F]
} }
def notifyJoex: F[Unit] = def notifyJoex: F[Unit] =
sch.notifyChange *> pubSub.publish1IgnoreErrors(JobsNotify(), ()).void pubSub.publish1IgnoreErrors(JobsNotify(), ()).void
def scheduleNotify(pj: RPeriodicTask): F[Unit] = def scheduleNotify(pj: RPeriodicTask): F[Unit] =
Timestamp Timestamp

View File

@ -9,6 +9,7 @@ package docspell.scheduler.impl
import cats.effect._ import cats.effect._
import cats.implicits._ import cats.implicits._
import docspell.common._ import docspell.common._
import docspell.scheduler.{Job, JobStore}
import docspell.store.queries.QPeriodicTask import docspell.store.queries.QPeriodicTask
import docspell.store.records._ import docspell.store.records._
import docspell.store.{AddResult, Store} import docspell.store.{AddResult, Store}
@ -37,12 +38,18 @@ trait PeriodicTaskStore[F[_]] {
/** Find all joex nodes as registered in the database. */ /** Find all joex nodes as registered in the database. */
def findJoexNodes: F[Vector[RNode]] def findJoexNodes: F[Vector[RNode]]
/** Creates a job from the given task and submits it into the job queue */
def submit(task: RPeriodicTask): F[Unit]
} }
object PeriodicTaskStore { object PeriodicTaskStore {
def create[F[_]: Sync](store: Store[F]): Resource[F, PeriodicTaskStore[F]] = def apply[F[_]: Sync](
Resource.pure[F, PeriodicTaskStore[F]](new PeriodicTaskStore[F] { store: Store[F],
jobStore: JobStore[F]
): PeriodicTaskStore[F] =
new PeriodicTaskStore[F] {
private[this] val logger = docspell.logging.getLogger[F] private[this] val logger = docspell.logging.getLogger[F]
def takeNext( def takeNext(
worker: Ident, worker: Ident,
@ -116,5 +123,22 @@ object PeriodicTaskStore {
def findJoexNodes: F[Vector[RNode]] = def findJoexNodes: F[Vector[RNode]] =
store.transact(RNode.findAll(NodeType.Joex)) store.transact(RNode.findAll(NodeType.Joex))
}) def submit(task: RPeriodicTask) =
makeJob(task).flatMap(jobStore.insert)
def makeJob(rt: RPeriodicTask): F[Job[String]] =
Ident.randomId[F].map { id =>
Job(
id,
rt.task,
rt.group,
rt.args,
rt.subject,
rt.submitter,
rt.priority,
Some(id)
)
}
}
} }

View File

@ -1,19 +1,48 @@
package docspell.scheduler.usertask package docspell.scheduler.impl
import cats.implicits._
import cats.effect.Sync import cats.effect.Sync
import cats.syntax.all._
import com.github.eikek.calev.CalEvent import com.github.eikek.calev.CalEvent
import docspell.common._ import docspell.common.{AccountId, Ident, Priority, Timestamp}
import docspell.scheduler.usertask.{UserTask, UserTaskScope}
import docspell.store.qb.DML import docspell.store.qb.DML
import docspell.store.qb.DSL._ import docspell.store.qb.DSL._
import docspell.store.records.RPeriodicTask import docspell.store.records.RPeriodicTask
import doobie.ConnectionIO
import fs2.Stream import fs2.Stream
import io.circe.Encoder import io.circe.{Decoder, Encoder}
import doobie._
object QUserTask { object QUserTask {
private val RT = RPeriodicTask.T private val RT = RPeriodicTask.T
implicit final class UserTaskCodec(ut: UserTask[String]) {
import docspell.common.syntax.all._
def decode[A](implicit D: Decoder[A]): Either[String, UserTask[A]] =
ut.args
.parseJsonAs[A]
.left
.map(_.getMessage)
.map(a => ut.copy(args = a))
def toPeriodicTask[F[_]: Sync](
scope: UserTaskScope,
subject: Option[String]
): F[RPeriodicTask] =
QUserTask
.create[F](
ut.enabled,
scope,
ut.name,
ut.args,
subject.getOrElse(s"${scope.fold(_.user.id, _.id)}: ${ut.name.id}"),
Priority.Low,
ut.timer,
ut.summary
)
.map(r => r.copy(id = ut.id))
}
def findAll(account: AccountId): Stream[ConnectionIO, UserTask[String]] = def findAll(account: AccountId): Stream[ConnectionIO, UserTask[String]] =
run( run(
select(RT.all), select(RT.all),

View File

@ -1,19 +1,13 @@
/* package docspell.scheduler.impl
* Copyright 2020 Eike K. & Contributors
*
* SPDX-License-Identifier: AGPL-3.0-or-later
*/
package docspell.scheduler import cats.syntax.all._
import cats.effect._
import cats.effect.std.Queue import cats.effect.std.Queue
import cats.implicits._ import cats.effect.{Async, Sync}
import fs2.Stream import docspell.common.{Ident, LogLevel}
import docspell.common._
import docspell.logging import docspell.logging
import docspell.logging.{Level, Logger} import docspell.logging.{Level, Logger}
import docspell.scheduler.LogEvent
import fs2.Stream
object QueueLogger { object QueueLogger {

View File

@ -10,7 +10,7 @@ import cats.effect._
import cats.effect.std.Semaphore import cats.effect.std.Semaphore
import cats.implicits._ import cats.implicits._
import fs2.concurrent.SignallingRef import fs2.concurrent.SignallingRef
import docspell.scheduler.{JobQueue, _} import docspell.scheduler._
import docspell.notification.api.EventSink import docspell.notification.api.EventSink
import docspell.pubsub.api.PubSubT import docspell.pubsub.api.PubSubT
import docspell.store.Store import docspell.store.Store
@ -19,7 +19,7 @@ case class SchedulerBuilder[F[_]: Async](
config: SchedulerConfig, config: SchedulerConfig,
tasks: JobTaskRegistry[F], tasks: JobTaskRegistry[F],
store: Store[F], store: Store[F],
queue: Resource[F, JobQueue[F]], queue: JobQueue[F],
logSink: LogSink[F], logSink: LogSink[F],
pubSub: PubSubT[F], pubSub: PubSubT[F],
eventSink: EventSink[F] eventSink: EventSink[F]
@ -34,14 +34,11 @@ case class SchedulerBuilder[F[_]: Async](
def withTask[A](task: JobTask[F]): SchedulerBuilder[F] = def withTask[A](task: JobTask[F]): SchedulerBuilder[F] =
withTaskRegistry(tasks.withTask(task)) withTaskRegistry(tasks.withTask(task))
def withQueue(queue: Resource[F, JobQueue[F]]): SchedulerBuilder[F] =
copy(queue = queue)
def withLogSink(sink: LogSink[F]): SchedulerBuilder[F] = def withLogSink(sink: LogSink[F]): SchedulerBuilder[F] =
copy(logSink = sink) copy(logSink = sink)
def withQueue(queue: JobQueue[F]): SchedulerBuilder[F] = def withQueue(queue: JobQueue[F]): SchedulerBuilder[F] =
copy(queue = Resource.pure[F, JobQueue[F]](queue)) copy(queue = queue)
def withPubSub(pubSubT: PubSubT[F]): SchedulerBuilder[F] = def withPubSub(pubSubT: PubSubT[F]): SchedulerBuilder[F] =
copy(pubSub = pubSubT) copy(pubSub = pubSubT)
@ -53,14 +50,13 @@ case class SchedulerBuilder[F[_]: Async](
resource.evalMap(sch => Async[F].start(sch.start.compile.drain).map(_ => sch)) resource.evalMap(sch => Async[F].start(sch.start.compile.drain).map(_ => sch))
def resource: Resource[F, Scheduler[F]] = { def resource: Resource[F, Scheduler[F]] = {
val scheduler: Resource[F, SchedulerImpl[F]] = for { val scheduler: F[SchedulerImpl[F]] = for {
jq <- queue waiter <- SignallingRef(true)
waiter <- Resource.eval(SignallingRef(true)) state <- SignallingRef(SchedulerImpl.emptyState[F])
state <- Resource.eval(SignallingRef(SchedulerImpl.emptyState[F])) perms <- Semaphore(config.poolSize.toLong)
perms <- Resource.eval(Semaphore(config.poolSize.toLong))
} yield new SchedulerImpl[F]( } yield new SchedulerImpl[F](
config, config,
jq, queue,
pubSub, pubSub,
eventSink, eventSink,
tasks, tasks,
@ -71,7 +67,7 @@ case class SchedulerBuilder[F[_]: Async](
perms perms
) )
scheduler.evalTap(_.init).map(s => s: Scheduler[F]) Resource.eval(scheduler.flatTap(_.init)).map(s => s: Scheduler[F])
} }
} }
@ -86,10 +82,9 @@ object SchedulerBuilder {
config, config,
JobTaskRegistry.empty[F], JobTaskRegistry.empty[F],
store, store,
JobQueue.create(store), JobQueue(store),
LogSink.db[F](store), LogSink.db[F](store),
PubSubT.noop[F], PubSubT.noop[F],
EventSink.silent[F] EventSink.silent[F]
) )
} }

View File

@ -14,7 +14,7 @@ import fs2.Stream
import fs2.concurrent.SignallingRef import fs2.concurrent.SignallingRef
import docspell.scheduler.msg.{CancelJob, JobDone, JobsNotify} import docspell.scheduler.msg.{CancelJob, JobDone, JobsNotify}
import docspell.common._ import docspell.common._
import docspell.scheduler.{JobQueue, _} import docspell.scheduler._
import docspell.scheduler.impl.SchedulerImpl._ import docspell.scheduler.impl.SchedulerImpl._
import docspell.notification.api.Event import docspell.notification.api.Event
import docspell.notification.api.EventSink import docspell.notification.api.EventSink
@ -172,7 +172,7 @@ final class SchedulerImpl[F[_]: Async](
for { for {
_ <- _ <-
logger.debug(s"Creating context for job ${job.info} to run cancellation $t") logger.debug(s"Creating context for job ${job.info} to run cancellation $t")
ctx <- Context[F, String](job, job.args, config, logSink, store) ctx <- ContextImpl[F, String](job, job.args, config, logSink, store)
_ <- t.onCancel.run(ctx) _ <- t.onCancel.run(ctx)
_ <- state.modify(_.markCancelled(job)) _ <- state.modify(_.markCancelled(job))
_ <- onFinish(job, JobTaskResult.empty, JobState.Cancelled) _ <- onFinish(job, JobTaskResult.empty, JobState.Cancelled)
@ -196,7 +196,7 @@ final class SchedulerImpl[F[_]: Async](
case Right(t) => case Right(t) =>
for { for {
_ <- logger.debug(s"Creating context for job ${job.info} to run $t") _ <- logger.debug(s"Creating context for job ${job.info} to run $t")
ctx <- Context[F, String](job, job.args, config, logSink, store) ctx <- ContextImpl[F, String](job, job.args, config, logSink, store)
jot = wrapTask(job, t.task, ctx) jot = wrapTask(job, t.task, ctx)
tok <- forkRun(job, jot.run(ctx), t.onCancel.run(ctx), ctx) tok <- forkRun(job, jot.run(ctx), t.onCancel.run(ctx), ctx)
_ <- state.modify(_.addRunning(job, tok)) _ <- state.modify(_.addRunning(job, tok))

View File

@ -0,0 +1,68 @@
package docspell.scheduler.impl
import cats.effect._
import docspell.common.Ident
import docspell.scheduler.{
JobTaskRegistry,
PeriodicSchedulerConfig,
SchedulerConfig,
SchedulerModule
}
/** Builder combining the job scheduler and periodic-task scheduler into one
  * [[SchedulerModule]] sharing the same job-store module.
  */
case class SchedulerModuleBuilder[F[_]: Async] private (
    periodicSchedulerConfig: PeriodicSchedulerConfig,
    schedulerBuilder: SchedulerBuilder[F],
    jobStoreModule: JobStoreModuleBuilder.Module[F]
) {

  /** Applies a transformation to the wrapped scheduler builder. */
  private def configureScheduler(
      f: SchedulerBuilder[F] => SchedulerBuilder[F]
  ): SchedulerModuleBuilder[F] =
    copy(schedulerBuilder = f(schedulerBuilder))

  def withTaskRegistry(reg: JobTaskRegistry[F]): SchedulerModuleBuilder[F] =
    configureScheduler(_.withTaskRegistry(reg))

  def withSchedulerConfig(cfg: SchedulerConfig): SchedulerModuleBuilder[F] =
    configureScheduler(_.withConfig(cfg))

  def withPeriodicSchedulerConfig(
      cfg: PeriodicSchedulerConfig
  ): SchedulerModuleBuilder[F] =
    copy(periodicSchedulerConfig = cfg)

  /** Allocates both schedulers, wiring them to the module's store, pubsub and
    * event sink.
    */
  def resource: Resource[F, SchedulerModule[F]] =
    for {
      sch <- schedulerBuilder
        .withPubSub(jobStoreModule.pubSubT)
        .withEventSink(jobStoreModule.eventSink)
        .withQueue(JobQueue(jobStoreModule.store))
        .resource
      psch <- PeriodicSchedulerBuilder.resource(
        periodicSchedulerConfig,
        jobStoreModule.periodicTaskStore,
        jobStoreModule.pubSubT
      )
    } yield new SchedulerModule[F] {
      val scheduler = sch
      val periodicScheduler = psch
    }
}
object SchedulerModuleBuilder {

  /** Starts a builder with default scheduler and periodic-scheduler configs. */
  def apply[F[_]: Async](
      jobStoreModule: JobStoreModuleBuilder.Module[F]
  ): SchedulerModuleBuilder[F] = {
    // NOTE(review): placeholder node id; presumably callers override it via
    // withSchedulerConfig/withPeriodicSchedulerConfig — confirm at call sites
    val id = Ident.unsafe("default-node-id")
    new SchedulerModuleBuilder(
      PeriodicSchedulerConfig.default(id),
      SchedulerBuilder(SchedulerConfig.default(id), jobStoreModule.store),
      jobStoreModule
    )
  }
}

View File

@ -0,0 +1,117 @@
package docspell.scheduler.impl
import cats.effect._
import docspell.scheduler.usertask.UserTaskStore
import cats.data.OptionT
import cats.implicits._
import docspell.common._
import docspell.scheduler.usertask._
import docspell.store.{AddResult, Store}
import fs2.Stream
import io.circe._
import QUserTask.UserTaskCodec
/** `UserTaskStore` persisting user tasks via `QUserTask` queries.
  *
  * Task arguments are stored as encoded strings; the typed variants decode
  * them and raise on malformed data. `executeNow` bypasses the schedule by
  * converting the task to a periodic task and submitting it directly.
  */
final class UserTaskStoreImpl[F[_]: Sync](
    store: Store[F],
    periodicTaskStore: PeriodicTaskStore[F]
) extends UserTaskStore[F] {

  /** All tasks of the scope, with raw (still-encoded) arguments. */
  def getAll(scope: UserTaskScope): Stream[F, UserTask[String]] =
    store.transact(QUserTask.findAll(scope.toAccountId))

  /** All tasks of the scope with the given name, arguments left encoded. */
  def getByNameRaw(scope: UserTaskScope, name: Ident): Stream[F, UserTask[String]] =
    store.transact(QUserTask.findByName(scope.toAccountId, name))

  /** The task with the given id within the scope, arguments left encoded. */
  def getByIdRaw(scope: UserTaskScope, id: Ident): OptionT[F, UserTask[String]] =
    OptionT(store.transact(QUserTask.findById(scope.toAccountId, id)))

  /** Like [[getByNameRaw]], but decodes the arguments; raises on decode failure. */
  def getByName[A](scope: UserTaskScope, name: Ident)(implicit
      D: Decoder[A]
  ): Stream[F, UserTask[A]] =
    getByNameRaw(scope, name).flatMap(_.decode match {
      case Right(ua) => Stream.emit(ua)
      case Left(err) => Stream.raiseError[F](new Exception(err))
    })

  /** Inserts the task, or updates it when a task with its id already exists.
    * @return the number of affected rows
    */
  def updateTask[A](scope: UserTaskScope, subject: Option[String], ut: UserTask[A])(
      implicit E: Encoder[A]
  ): F[Int] = {
    val exists = QUserTask.exists(ut.id)
    val insert = QUserTask.insert(scope, subject, ut.encode)
    store.add(insert, exists).flatMap {
      case AddResult.Success =>
        1.pure[F]
      case AddResult.EntityExists(_) =>
        // fall back to an update when the insert hit an existing row
        store.transact(QUserTask.update(scope, subject, ut.encode))
      case AddResult.Failure(ex) =>
        Sync[F].raiseError(ex)
    }
  }

  /** Deletes the task with the given id; returns the number of deleted rows. */
  def deleteTask(scope: UserTaskScope, id: Ident): F[Int] =
    store.transact(QUserTask.delete(scope.toAccountId, id))

  /** The single task with this name, raw arguments; raises when the name is
    * ambiguous (more than one match).
    */
  def getOneByNameRaw(
      scope: UserTaskScope,
      name: Ident
  ): OptionT[F, UserTask[String]] =
    OptionT(
      getByNameRaw(scope, name)
        // fetching two rows is enough to detect ambiguity
        .take(2)
        .compile
        .toList
        .flatMap {
          case Nil      => (None: Option[UserTask[String]]).pure[F]
          case ut :: Nil => ut.some.pure[F]
          case _        => Sync[F].raiseError(new Exception("More than one result found"))
        }
    )

  /** Like [[getOneByNameRaw]], but decodes arguments; raises on decode failure. */
  def getOneByName[A](scope: UserTaskScope, name: Ident)(implicit
      D: Decoder[A]
  ): OptionT[F, UserTask[A]] =
    getOneByNameRaw(scope, name)
      .semiflatMap(_.decode match {
        case Right(ua) => ua.pure[F]
        case Left(err) => Sync[F].raiseError(new Exception(err))
      })

  /** Ensures exactly one task with this name exists: updates the first match
    * (keeping its id) and deletes any duplicates, or inserts when none exists.
    * @return the stored task with encoded arguments
    */
  def updateOneTask[A](
      scope: UserTaskScope,
      subject: Option[String],
      ut: UserTask[A]
  )(implicit
      E: Encoder[A]
  ): F[UserTask[String]] =
    getByNameRaw(scope, ut.name).compile.toList.flatMap {
      case a :: rest =>
        val task = ut.copy(id = a.id).encode
        for {
          _ <- store.transact(QUserTask.update(scope, subject, task))
          _ <- store.transact(
            rest.traverse(t => QUserTask.delete(scope.toAccountId, t.id))
          )
        } yield task
      case Nil =>
        val task = ut.encode
        store.transact(QUserTask.insert(scope, subject, task)).map(_ => task)
    }

  /** Deletes all tasks of the scope with the given name; returns the count. */
  def deleteAll(scope: UserTaskScope, name: Ident): F[Int] =
    store.transact(QUserTask.deleteAll(scope.toAccountId, name))

  /** Submits the task for immediate execution, ignoring its schedule. The
    * stored periodic task is not modified.
    */
  def executeNow[A](scope: UserTaskScope, subject: Option[String], task: UserTask[A])(
      implicit E: Encoder[A]
  ): F[Unit] =
    for {
      ptask <- task.encode.toPeriodicTask(scope, subject)
      _ <- periodicTaskStore.submit(ptask)
    } yield ()
}
object UserTaskStoreImpl {

  /** Creates a `UserTaskStore` backed by the database and periodic-task store. */
  def apply[F[_]: Sync](
      store: Store[F],
      periodicTaskStore: PeriodicTaskStore[F]
  ): UserTaskStore[F] =
    new UserTaskStoreImpl[F](store, periodicTaskStore)
}

View File

@ -48,22 +48,22 @@ case class RJob(
object RJob { object RJob {
def newJob[A]( def newJob(
id: Ident, id: Ident,
task: Ident, task: Ident,
group: Ident, group: Ident,
args: A, args: String,
subject: String, subject: String,
submitted: Timestamp, submitted: Timestamp,
submitter: Ident, submitter: Ident,
priority: Priority, priority: Priority,
tracker: Option[Ident] tracker: Option[Ident]
)(implicit E: Encoder[A]): RJob = ): RJob =
RJob( RJob(
id, id,
task, task,
group, group,
E(args).noSpaces, args,
subject, subject,
submitted, submitted,
submitter, submitter,
@ -77,6 +77,29 @@ object RJob {
None None
) )
def fromJson[A](
id: Ident,
task: Ident,
group: Ident,
args: A,
subject: String,
submitted: Timestamp,
submitter: Ident,
priority: Priority,
tracker: Option[Ident]
)(implicit E: Encoder[A]): RJob =
newJob(
id,
task,
group,
E(args).noSpaces,
subject,
submitted,
submitter,
priority,
tracker
)
final case class Table(alias: Option[String]) extends TableDef { final case class Table(alias: Option[String]) extends TableDef {
val tableName = "job" val tableName = "job"

View File

@ -30,7 +30,7 @@ class QJobTest extends CatsEffectSuite with StoreFixture with TestLoggingConfig
private val group2 = Ident.unsafe("group2") private val group2 = Ident.unsafe("group2")
def createJob(group: Ident): RJob = def createJob(group: Ident): RJob =
RJob.newJob[Unit]( RJob.fromJson[Unit](
Ident.unsafe(s"job-${c.incrementAndGet()}"), Ident.unsafe(s"job-${c.incrementAndGet()}"),
Ident.unsafe("task"), Ident.unsafe("task"),
group, group,