Refactor scheduler into api / impl

This commit is contained in:
eikek
2022-03-13 14:27:06 +01:00
parent 69765f05ff
commit 3a05dc56cb
50 changed files with 1076 additions and 867 deletions

View File

@@ -14,8 +14,7 @@ import docspell.backend.signup.OSignup
import docspell.ftsclient.FtsClient
import docspell.notification.api.{EventExchange, NotificationModule}
import docspell.pubsub.api.PubSubT
import docspell.scheduler.msg.JobQueuePublish
import docspell.scheduler.usertask.UserTaskStore
import docspell.scheduler.JobStoreModule
import docspell.store.Store
import docspell.totp.Totp
import emil.Emil
@@ -58,29 +57,43 @@ object BackendApp {
javaEmil: Emil[F],
ftsClient: FtsClient[F],
pubSubT: PubSubT[F],
schedulerModule: JobStoreModule[F],
notificationMod: NotificationModule[F]
): Resource[F, BackendApp[F]] =
for {
utStore <- UserTaskStore(store)
queue <- JobQueuePublish(store, pubSubT, notificationMod)
totpImpl <- OTotp(store, Totp.default)
loginImpl <- Login[F](store, Totp.default)
signupImpl <- OSignup[F](store)
joexImpl <- OJoex(pubSubT)
collImpl <- OCollective[F](store, utStore, queue, joexImpl)
collImpl <- OCollective[F](
store,
schedulerModule.userTasks,
schedulerModule.jobs,
joexImpl
)
sourceImpl <- OSource[F](store)
tagImpl <- OTag[F](store)
equipImpl <- OEquipment[F](store)
orgImpl <- OOrganization(store)
uploadImpl <- OUpload(store, queue, joexImpl)
uploadImpl <- OUpload(store, schedulerModule.jobs, joexImpl)
nodeImpl <- ONode(store)
jobImpl <- OJob(store, joexImpl, pubSubT)
createIndex <- CreateIndex.resource(ftsClient, store)
itemImpl <- OItem(store, ftsClient, createIndex, queue, joexImpl)
itemImpl <- OItem(store, ftsClient, createIndex, schedulerModule.jobs, joexImpl)
itemSearchImpl <- OItemSearch(store)
fulltextImpl <- OFulltext(itemSearchImpl, ftsClient, store, queue, joexImpl)
fulltextImpl <- OFulltext(
itemSearchImpl,
ftsClient,
store,
schedulerModule.jobs,
joexImpl
)
mailImpl <- OMail(store, javaEmil)
userTaskImpl <- OUserTask(utStore, store, queue, joexImpl)
userTaskImpl <- OUserTask(
schedulerModule.userTasks,
store,
joexImpl
)
folderImpl <- OFolder(store)
customFieldsImpl <- OCustomFields(store)
simpleSearchImpl = OSimpleSearch(fulltextImpl, itemSearchImpl)
@@ -90,7 +103,7 @@ object BackendApp {
)
notifyImpl <- ONotification(store, notificationMod)
bookmarksImpl <- OQueryBookmarks(store)
fileRepoImpl <- OFileRepository(store, queue, joexImpl)
fileRepoImpl <- OFileRepository(store, schedulerModule.jobs, joexImpl)
} yield new BackendApp[F] {
val pubSub = pubSubT
val login = loginImpl

View File

@@ -8,124 +8,91 @@ package docspell.backend
import cats.effect._
import cats.implicits._
import docspell.backend.MailAddressCodec
import docspell.common._
import docspell.notification.api.PeriodicQueryArgs
import docspell.store.records.RJob
import docspell.scheduler.Job
object JobFactory extends MailAddressCodec {
def integrityCheck[F[_]: Sync](
args: FileIntegrityCheckArgs,
submitter: AccountId = DocspellSystem.account
): F[RJob] =
for {
id <- Ident.randomId[F]
now <- Timestamp.current[F]
job = RJob.newJob(
id,
FileIntegrityCheckArgs.taskName,
submitter.collective,
args,
s"Check integrity of files",
now,
submitter.user,
Priority.High,
Some(FileIntegrityCheckArgs.taskName)
)
} yield job
): F[Job[FileIntegrityCheckArgs]] =
Job.createNew(
FileIntegrityCheckArgs.taskName,
submitter.collective,
args,
s"Check integrity of files",
submitter.user,
Priority.High,
Some(FileIntegrityCheckArgs.taskName)
)
def fileCopy[F[_]: Sync](
args: FileCopyTaskArgs,
submitter: AccountId = DocspellSystem.account
): F[RJob] =
for {
id <- Ident.randomId[F]
now <- Timestamp.current[F]
job = RJob.newJob(
id,
FileCopyTaskArgs.taskName,
submitter.collective,
args,
s"Copying all files",
now,
submitter.user,
Priority.High,
Some(FileCopyTaskArgs.taskName)
)
} yield job
): F[Job[FileCopyTaskArgs]] =
Job.createNew(
FileCopyTaskArgs.taskName,
submitter.collective,
args,
"Copying all files",
submitter.user,
Priority.High,
Some(FileCopyTaskArgs.taskName)
)
def periodicQuery[F[_]: Sync](args: PeriodicQueryArgs, submitter: AccountId): F[RJob] =
for {
id <- Ident.randomId[F]
now <- Timestamp.current[F]
job = RJob.newJob(
id,
PeriodicQueryArgs.taskName,
submitter.collective,
args,
s"Running periodic query, notify via ${args.channels.map(_.channelType)}",
now,
submitter.user,
Priority.Low,
None
)
} yield job
def periodicQuery[F[_]: Sync](
args: PeriodicQueryArgs,
submitter: AccountId
): F[Job[PeriodicQueryArgs]] =
Job.createNew(
PeriodicQueryArgs.taskName,
submitter.collective,
args,
s"Running periodic query, notify via ${args.channels.map(_.channelType)}",
submitter.user,
Priority.Low,
None
)
def makePageCount[F[_]: Sync](
args: MakePageCountArgs,
account: Option[AccountId]
): F[RJob] =
for {
id <- Ident.randomId[F]
now <- Timestamp.current[F]
job = RJob.newJob(
id,
MakePageCountArgs.taskName,
account.map(_.collective).getOrElse(DocspellSystem.taskGroup),
args,
s"Find page-count metadata for ${args.attachment.id}",
now,
account.map(_.user).getOrElse(DocspellSystem.user),
Priority.Low,
Some(MakePageCountArgs.taskName / args.attachment)
)
} yield job
): F[Job[MakePageCountArgs]] =
Job.createNew(
MakePageCountArgs.taskName,
account.map(_.collective).getOrElse(DocspellSystem.taskGroup),
args,
s"Find page-count metadata for ${args.attachment.id}",
account.map(_.user).getOrElse(DocspellSystem.user),
Priority.Low,
Some(MakePageCountArgs.taskName / args.attachment)
)
def makePreview[F[_]: Sync](
args: MakePreviewArgs,
account: Option[AccountId]
): F[RJob] =
for {
id <- Ident.randomId[F]
now <- Timestamp.current[F]
job = RJob.newJob(
id,
MakePreviewArgs.taskName,
account.map(_.collective).getOrElse(DocspellSystem.taskGroup),
args,
s"Generate preview image",
now,
account.map(_.user).getOrElse(DocspellSystem.user),
Priority.Low,
Some(MakePreviewArgs.taskName / args.attachment)
)
} yield job
): F[Job[MakePreviewArgs]] =
Job.createNew(
MakePreviewArgs.taskName,
account.map(_.collective).getOrElse(DocspellSystem.taskGroup),
args,
s"Generate preview image",
account.map(_.user).getOrElse(DocspellSystem.user),
Priority.Low,
Some(MakePreviewArgs.taskName / args.attachment)
)
def allPreviews[F[_]: Sync](
args: AllPreviewsArgs,
submitter: Option[Ident]
): F[RJob] =
for {
id <- Ident.randomId[F]
now <- Timestamp.current[F]
} yield RJob.newJob(
id,
): F[Job[AllPreviewsArgs]] =
Job.createNew(
AllPreviewsArgs.taskName,
args.collective.getOrElse(DocspellSystem.taskGroup),
args,
"Create preview images",
now,
submitter.getOrElse(DocspellSystem.user),
Priority.Low,
Some(DocspellSystem.allPreviewTaskTracker)
@@ -135,127 +102,91 @@ object JobFactory extends MailAddressCodec {
collective: Option[Ident],
submitter: Option[Ident],
prio: Priority
): F[RJob] =
for {
id <- Ident.randomId[F]
now <- Timestamp.current[F]
job = RJob.newJob(
id,
ConvertAllPdfArgs.taskName,
collective.getOrElse(DocspellSystem.taskGroup),
ConvertAllPdfArgs(collective),
s"Convert all pdfs not yet converted",
now,
submitter.getOrElse(DocspellSystem.user),
prio,
collective
.map(c => c / ConvertAllPdfArgs.taskName)
.orElse(ConvertAllPdfArgs.taskName.some)
)
} yield job
): F[Job[ConvertAllPdfArgs]] =
Job.createNew(
ConvertAllPdfArgs.taskName,
collective.getOrElse(DocspellSystem.taskGroup),
ConvertAllPdfArgs(collective),
s"Convert all pdfs not yet converted",
submitter.getOrElse(DocspellSystem.user),
prio,
collective
.map(c => c / ConvertAllPdfArgs.taskName)
.orElse(ConvertAllPdfArgs.taskName.some)
)
def reprocessItem[F[_]: Sync](
args: ReProcessItemArgs,
account: AccountId,
prio: Priority
): F[RJob] =
for {
id <- Ident.randomId[F]
now <- Timestamp.current[F]
job = RJob.newJob(
id,
ReProcessItemArgs.taskName,
account.collective,
args,
s"Re-process files of item ${args.itemId.id}",
now,
account.user,
prio,
Some(ReProcessItemArgs.taskName / args.itemId)
)
} yield job
): F[Job[ReProcessItemArgs]] =
Job.createNew(
ReProcessItemArgs.taskName,
account.collective,
args,
s"Re-process files of item ${args.itemId.id}",
account.user,
prio,
Some(ReProcessItemArgs.taskName / args.itemId)
)
def processItem[F[_]: Sync](
args: ProcessItemArgs,
account: AccountId,
prio: Priority,
tracker: Option[Ident]
): F[RJob] =
for {
id <- Ident.randomId[F]
now <- Timestamp.current[F]
job = RJob.newJob(
id,
ProcessItemArgs.taskName,
account.collective,
args,
args.makeSubject,
now,
account.user,
prio,
tracker
)
} yield job
): F[Job[ProcessItemArgs]] =
Job.createNew(
ProcessItemArgs.taskName,
account.collective,
args,
args.makeSubject,
account.user,
prio,
tracker
)
def processItems[F[_]: Sync](
args: Vector[ProcessItemArgs],
account: AccountId,
prio: Priority,
tracker: Option[Ident]
): F[Vector[RJob]] = {
def create(now: Timestamp, arg: ProcessItemArgs): F[RJob] =
Ident
.randomId[F]
.map(id =>
RJob.newJob(
id,
ProcessItemArgs.taskName,
account.collective,
arg,
arg.makeSubject,
now,
account.user,
prio,
tracker
)
)
): F[Vector[Job[ProcessItemArgs]]] = {
def create(arg: ProcessItemArgs): F[Job[ProcessItemArgs]] =
Job.createNew(
ProcessItemArgs.taskName,
account.collective,
arg,
arg.makeSubject,
account.user,
prio,
tracker
)
for {
now <- Timestamp.current[F]
jobs <- args.traverse(a => create(now, a))
} yield jobs
args.traverse(create)
}
def reIndexAll[F[_]: Sync]: F[RJob] =
for {
id <- Ident.randomId[F]
now <- Timestamp.current[F]
} yield RJob.newJob(
id,
def reIndexAll[F[_]: Sync]: F[Job[ReIndexTaskArgs]] =
Job.createNew(
ReIndexTaskArgs.taskName,
DocspellSystem.taskGroup,
ReIndexTaskArgs(None),
s"Recreate full-text index",
now,
"Recreate full-text index",
DocspellSystem.taskGroup,
Priority.Low,
Some(DocspellSystem.migrationTaskTracker)
)
def reIndex[F[_]: Sync](account: AccountId): F[RJob] =
for {
id <- Ident.randomId[F]
now <- Timestamp.current[F]
args = ReIndexTaskArgs(Some(account.collective))
} yield RJob.newJob(
id,
def reIndex[F[_]: Sync](account: AccountId): F[Job[ReIndexTaskArgs]] = {
val args = ReIndexTaskArgs(Some(account.collective))
Job.createNew(
ReIndexTaskArgs.taskName,
account.collective,
args,
s"Recreate full-text index",
now,
"Recreate full-text index",
account.user,
Priority.Low,
Some(ReIndexTaskArgs.tracker(args))
)
}
}

View File

@@ -18,7 +18,7 @@ import docspell.store.queries.{QCollective, QUser}
import docspell.store.records._
import docspell.store.{AddResult, Store}
import com.github.eikek.calev._
import docspell.scheduler.JobQueue
import docspell.scheduler.JobStore
import docspell.scheduler.usertask.{UserTask, UserTaskScope, UserTaskStore}
trait OCollective[F[_]] {
@@ -131,7 +131,7 @@ object OCollective {
def apply[F[_]: Async](
store: Store[F],
uts: UserTaskStore[F],
queue: JobQueue[F],
jobStore: JobStore[F],
joex: OJoex[F]
): Resource[F, OCollective[F]] =
Resource.pure[F, OCollective[F]](new OCollective[F] {
@@ -194,32 +194,32 @@ object OCollective {
for {
id <- Ident.randomId[F]
args = LearnClassifierArgs(collective)
ut <- UserTask(
ut = UserTask(
id,
LearnClassifierArgs.taskName,
true,
CalEvent(WeekdayComponent.All, DateEvent.All, TimeEvent.All),
None,
args
).encode.toPeriodicTask(UserTaskScope(collective), args.makeSubject.some)
job <- ut.toJob
_ <- queue.insert(job)
)
_ <- uts
.updateOneTask(UserTaskScope(collective), args.makeSubject.some, ut)
_ <- joex.notifyAllNodes
} yield ()
def startEmptyTrash(args: EmptyTrashArgs): F[Unit] =
for {
id <- Ident.randomId[F]
ut <- UserTask(
ut = UserTask(
id,
EmptyTrashArgs.taskName,
true,
CalEvent(WeekdayComponent.All, DateEvent.All, TimeEvent.All),
None,
args
).encode.toPeriodicTask(UserTaskScope(args.collective), args.makeSubject.some)
job <- ut.toJob
_ <- queue.insert(job)
)
_ <- uts
.updateOneTask(UserTaskScope(args.collective), args.makeSubject.some, ut)
_ <- joex.notifyAllNodes
} yield ()
@@ -319,7 +319,7 @@ object OCollective {
AllPreviewsArgs(Some(account.collective), storeMode),
Some(account.user)
)
_ <- queue.insertIfNew(job)
_ <- jobStore.insertIfNew(job.encode)
_ <- if (notifyJoex) joex.notifyAllNodes else ().pure[F]
} yield UpdateResult.success

View File

@@ -12,17 +12,22 @@ import cats.implicits._
import docspell.backend.JobFactory
import docspell.backend.ops.OFileRepository.IntegrityResult
import docspell.common._
import docspell.scheduler.JobQueue
import docspell.scheduler.{Job, JobStore}
import docspell.store.Store
import docspell.store.records.RJob
import scodec.bits.ByteVector
trait OFileRepository[F[_]] {
/** Inserts the job or return None if such a job already is running. */
def cloneFileRepository(args: FileCopyTaskArgs, notifyJoex: Boolean): F[Option[RJob]]
def cloneFileRepository(
args: FileCopyTaskArgs,
notifyJoex: Boolean
): F[Option[Job[FileCopyTaskArgs]]]
def checkIntegrityAll(part: FileKeyPart, notifyJoex: Boolean): F[Option[RJob]]
def checkIntegrityAll(
part: FileKeyPart,
notifyJoex: Boolean
): F[Option[Job[FileIntegrityCheckArgs]]]
def checkIntegrity(key: FileKey, hash: Option[ByteVector]): F[Option[IntegrityResult]]
}
@@ -33,7 +38,7 @@ object OFileRepository {
def apply[F[_]: Async](
store: Store[F],
queue: JobQueue[F],
jobStore: JobStore[F],
joex: OJoex[F]
): Resource[F, OFileRepository[F]] =
Resource.pure(new OFileRepository[F] {
@@ -42,17 +47,20 @@ object OFileRepository {
def cloneFileRepository(
args: FileCopyTaskArgs,
notifyJoex: Boolean
): F[Option[RJob]] =
): F[Option[Job[FileCopyTaskArgs]]] =
for {
job <- JobFactory.fileCopy(args)
flag <- queue.insertIfNew(job)
flag <- jobStore.insertIfNew(job.encode)
_ <- if (notifyJoex) joex.notifyAllNodes else ().pure[F]
} yield Option.when(flag)(job)
def checkIntegrityAll(part: FileKeyPart, notifyJoex: Boolean): F[Option[RJob]] =
def checkIntegrityAll(
part: FileKeyPart,
notifyJoex: Boolean
): F[Option[Job[FileIntegrityCheckArgs]]] =
for {
job <- JobFactory.integrityCheck(FileIntegrityCheckArgs(part))
flag <- queue.insertIfNew(job)
flag <- jobStore.insertIfNew(job.encode)
_ <- if (notifyJoex) joex.notifyAllNodes else ().pure[F]
} yield Option.when(flag)(job)

View File

@@ -16,7 +16,7 @@ import docspell.common._
import docspell.ftsclient._
import docspell.query.ItemQuery._
import docspell.query.ItemQueryDsl._
import docspell.scheduler.JobQueue
import docspell.scheduler.JobStore
import docspell.store.queries.{QFolder, QItem, SelectedItem}
import docspell.store.records.RJob
import docspell.store.{Store, qb}
@@ -80,7 +80,7 @@ object OFulltext {
itemSearch: OItemSearch[F],
fts: FtsClient[F],
store: Store[F],
queue: JobQueue[F],
jobStore: JobStore[F],
joex: OJoex[F]
): Resource[F, OFulltext[F]] =
Resource.pure[F, OFulltext[F]](new OFulltext[F] {
@@ -89,7 +89,7 @@ object OFulltext {
for {
_ <- logger.info(s"Re-index all.")
job <- JobFactory.reIndexAll[F]
_ <- queue.insertIfNew(job) *> joex.notifyAllNodes
_ <- jobStore.insertIfNew(job.encode) *> joex.notifyAllNodes
} yield ()
def reindexCollective(account: AccountId): F[Unit] =
@@ -101,7 +101,7 @@ object OFulltext {
job <- JobFactory.reIndex(account)
_ <-
if (exist.isDefined) ().pure[F]
else queue.insertIfNew(job) *> joex.notifyAllNodes
else jobStore.insertIfNew(job.encode) *> joex.notifyAllNodes
} yield ()
def findIndexOnly(maxNoteLen: Int)(
@@ -323,9 +323,7 @@ object OFulltext {
def apply[A](implicit ev: ItemId[A]): ItemId[A] = ev
def from[A](f: A => Ident): ItemId[A] =
new ItemId[A] {
def itemId(a: A) = f(a)
}
(a: A) => f(a)
implicit val listItemId: ItemId[ListItem] =
ItemId.from(_.id)

View File

@@ -17,7 +17,7 @@ import docspell.common._
import docspell.ftsclient.FtsClient
import docspell.logging.Logger
import docspell.notification.api.Event
import docspell.scheduler.JobQueue
import docspell.scheduler.JobStore
import docspell.store.queries.{QAttachment, QItem, QMoveAttachment}
import docspell.store.records._
import docspell.store.{AddResult, Store, UpdateResult}
@@ -226,7 +226,7 @@ object OItem {
store: Store[F],
fts: FtsClient[F],
createIndex: CreateIndex[F],
queue: JobQueue[F],
jobStore: JobStore[F],
joex: OJoex[F]
): Resource[F, OItem[F]] =
for {
@@ -286,7 +286,7 @@ object OItem {
)
ev = Event.TagsChanged.partial(
itemIds,
added.toList.flatten.map(_.id).toList,
added.toList.flatten.map(_.id),
Nil
)
} yield AttachedEvent(UpdateResult.success)(ev))
@@ -761,7 +761,7 @@ object OItem {
job <- OptionT.liftF(
JobFactory.reprocessItem[F](args, account, Priority.Low)
)
_ <- OptionT.liftF(queue.insertIfNew(job))
_ <- OptionT.liftF(jobStore.insertIfNew(job.encode))
_ <- OptionT.liftF(if (notifyJoex) joex.notifyAllNodes else ().pure[F])
} yield UpdateResult.success).getOrElse(UpdateResult.notFound)
@@ -775,7 +775,8 @@ object OItem {
jobs <- items
.map(item => ReProcessItemArgs(item, Nil))
.traverse(arg => JobFactory.reprocessItem[F](arg, account, Priority.Low))
_ <- queue.insertAllIfNew(jobs)
.map(_.map(_.encode))
_ <- jobStore.insertAllIfNew(jobs)
_ <- if (notifyJoex) joex.notifyAllNodes else ().pure[F]
} yield items.size)
@@ -786,7 +787,7 @@ object OItem {
): F[UpdateResult] =
for {
job <- JobFactory.convertAllPdfs[F](collective, submitter, Priority.Low)
_ <- queue.insertIfNew(job)
_ <- jobStore.insertIfNew(job.encode)
_ <- if (notifyJoex) joex.notifyAllNodes else ().pure[F]
} yield UpdateResult.success
@@ -797,7 +798,7 @@ object OItem {
): F[UpdateResult] =
for {
job <- JobFactory.makePreview[F](args, account.some)
_ <- queue.insertIfNew(job)
_ <- jobStore.insertIfNew(job.encode)
_ <- if (notifyJoex) joex.notifyAllNodes else ().pure[F]
} yield UpdateResult.success
@@ -807,7 +808,7 @@ object OItem {
): F[UpdateResult] =
for {
job <- JobFactory.allPreviews[F](AllPreviewsArgs(None, storeMode), None)
_ <- queue.insertIfNew(job)
_ <- jobStore.insertIfNew(job.encode)
_ <- if (notifyJoex) joex.notifyAllNodes else ().pure[F]
} yield UpdateResult.success

View File

@@ -13,7 +13,7 @@ import cats.implicits._
import fs2.Stream
import docspell.backend.JobFactory
import docspell.common._
import docspell.scheduler.JobQueue
import docspell.scheduler.{Job, JobStore}
import docspell.store.Store
import docspell.store.records._
@@ -107,7 +107,7 @@ object OUpload {
def apply[F[_]: Sync](
store: Store[F],
queue: JobQueue[F],
jobStore: JobStore[F],
joex: OJoex[F]
): Resource[F, OUpload[F]] =
Resource.pure[F, OUpload[F]](new OUpload[F] {
@@ -186,10 +186,10 @@ object OUpload {
private def submitJobs(
notifyJoex: Boolean
)(jobs: Vector[RJob]): F[OUpload.UploadResult] =
)(jobs: Vector[Job[String]]): F[OUpload.UploadResult] =
for {
_ <- logger.debug(s"Storing jobs: $jobs")
_ <- queue.insertAll(jobs)
_ <- jobStore.insertAll(jobs)
_ <- if (notifyJoex) joex.notifyAllNodes else ().pure[F]
} yield UploadResult.Success
@@ -243,7 +243,9 @@ object OUpload {
account: AccountId,
prio: Priority,
tracker: Option[Ident]
): F[Vector[RJob]] =
JobFactory.processItems[F](args, account, prio, tracker)
): F[Vector[Job[String]]] =
JobFactory
.processItems[F](args, account, prio, tracker)
.map(_.map(_.encode))
})
}

View File

@@ -12,7 +12,6 @@ import cats.implicits._
import fs2.Stream
import docspell.common._
import docspell.notification.api.{ChannelRef, PeriodicDueItemsArgs, PeriodicQueryArgs}
import docspell.scheduler.JobQueue
import docspell.scheduler.usertask.{UserTask, UserTaskScope, UserTaskStore}
import docspell.store.Store
import docspell.store.records.RNotificationChannel
@@ -84,7 +83,6 @@ object OUserTask {
def apply[F[_]: Async](
taskStore: UserTaskStore[F],
store: Store[F],
queue: JobQueue[F],
joex: OJoex[F]
): Resource[F, OUserTask[F]] =
Resource.pure[F, OUserTask[F]](new OUserTask[F] {
@@ -93,10 +91,8 @@ object OUserTask {
implicit E: Encoder[A]
): F[Unit] =
for {
ptask <- task.encode.toPeriodicTask(scope, subject)
job <- ptask.toJob
_ <- queue.insert(job)
_ <- joex.notifyPeriodicTasks
_ <- taskStore.executeNow(scope, subject, task)
_ <- joex.notifyAllNodes
} yield ()
def getScanMailbox(scope: UserTaskScope): Stream[F, UserTask[ScanMailboxArgs]] =