Adapt joex to new collective-id
parent eabcffe71a
commit 816cca7ea2
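The hunks below swap the string-based `Ident` that previously identified a collective for the numeric `CollectiveId`, and route job submission through `UserTaskScope`. A minimal sketch of the type change, using simplified stand-ins rather than the actual docspell definitions (the field names here are assumptions for illustration only):

// Simplified stand-ins for illustration; the real types live in docspell.common.
final case class Ident(id: String)
final case class CollectiveId(value: Long)

object CollectiveIdSketch {
  // before: per-collective file names were derived from the string id
  def nerFileOld(collective: Ident): String = s"${collective.id}.json"

  // after: they are derived from the numeric id
  def nerFileNew(collective: CollectiveId): String = s"${collective.value}.json"

  def main(args: Array[String]): Unit = {
    println(nerFileOld(Ident("acme")))      // acme.json
    println(nerFileNew(CollectiveId(42L)))  // 42.json
  }
}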
@@ -86,7 +86,7 @@ object ODownloadAll {
   ): F[DownloadSummary] = for {
     _ <- logger.info(s"Download all request: $req")
     summary <- getSummary(account, req)
-    args = DownloadZipArgs(account.asAccountId, req)
+    args = DownloadZipArgs(account, req)
     _ <- OptionT
       .whenF(summary.state == DownloadState.NotPresent) {
         JobFactory
@@ -8,11 +8,10 @@ package docspell.backend.task

 import docspell.backend.ops.ODownloadAll.model.DownloadRequest
 import docspell.common._

 import io.circe.generic.semiauto.{deriveDecoder, deriveEncoder}
 import io.circe.{Decoder, Encoder}

-final case class DownloadZipArgs(accountId: AccountId, req: DownloadRequest)
+final case class DownloadZipArgs(account: AccountInfo, req: DownloadRequest)

 object DownloadZipArgs {
   val taskName: Ident = Ident.unsafe("download-query-zip")
@@ -84,7 +84,7 @@ final class JoexAppImpl[F[_]: Async](
       .evalMap { es =>
         val args = EmptyTrashArgs(es.cid, es.minAge)
         uts.updateOneTask(
-          UserTaskScope(args.collective),
+          UserTaskScope.collective(args.collective),
           args.makeSubject.some,
           EmptyTrashTask.userTask(args, es.schedule)
         )
@@ -49,7 +49,7 @@ object GenericItemAddonTask extends LoggerExtension {
       trigger: AddonTriggerType,
       addonTaskIds: Set[Ident]
   )(
-      collective: Ident,
+      collective: CollectiveId,
       data: ItemData,
       maybeMeta: Option[ProcessItemArgs.ProcessMeta]
   ): Task[F, Unit, ItemData] =
@@ -63,7 +63,7 @@ object GenericItemAddonTask extends LoggerExtension {
       trigger: AddonTriggerType,
       addonTaskIds: Set[Ident]
   )(
-      collective: Ident,
+      collective: CollectiveId,
       data: ItemData,
       maybeMeta: Option[ProcessItemArgs.ProcessMeta]
   ): Task[F, Unit, ExecResult] =
@@ -18,7 +18,7 @@ import docspell.store.queries.QCollective
 import io.circe.generic.semiauto._
 import io.circe.{Decoder, Encoder}

-case class NerFile(collective: Ident, updated: Timestamp, creation: Timestamp) {
+case class NerFile(collective: CollectiveId, updated: Timestamp, creation: Timestamp) {
   def nerFilePath(directory: Path): Path =
     NerFile.nerFilePath(directory, collective)

@@ -33,14 +33,14 @@ object NerFile {
   implicit val jsonEncoder: Encoder[NerFile] =
     deriveEncoder[NerFile]

-  private def nerFilePath(directory: Path, collective: Ident): Path =
-    directory.resolve(s"${collective.id}.txt")
+  private def nerFilePath(directory: Path, collective: CollectiveId): Path =
+    directory.resolve(s"${collective.value}.txt")

-  private def jsonFilePath(directory: Path, collective: Ident): Path =
-    directory.resolve(s"${collective.id}.json")
+  private def jsonFilePath(directory: Path, collective: CollectiveId): Path =
+    directory.resolve(s"${collective.value}.json")

   def find[F[_]: Async](
-      collective: Ident,
+      collective: CollectiveId,
       directory: Path
   ): F[Option[NerFile]] = {
     val file = jsonFilePath(directory, collective)
@@ -24,7 +24,7 @@ import io.circe.syntax._
 /** Maintains a custom regex-ner file per collective for stanford's regexner annotator. */
 trait RegexNerFile[F[_]] {

-  def makeFile(collective: Ident): F[Option[Path]]
+  def makeFile(collective: CollectiveId): F[Option[Path]]

 }

@@ -49,11 +49,11 @@ object RegexNerFile {

     private[this] val logger = docspell.logging.getLogger[F]

-    def makeFile(collective: Ident): F[Option[Path]] =
+    def makeFile(collective: CollectiveId): F[Option[Path]] =
       if (cfg.maxEntries > 0) doMakeFile(collective)
       else (None: Option[Path]).pure[F]

-    def doMakeFile(collective: Ident): F[Option[Path]] =
+    def doMakeFile(collective: CollectiveId): F[Option[Path]] =
       for {
         now <- Timestamp.current[F]
         existing <- NerFile.find[F](collective, cfg.directory)
@@ -75,7 +75,7 @@ object RegexNerFile {
       } yield result

     private def updateFile(
-        collective: Ident,
+        collective: CollectiveId,
         now: Timestamp,
         current: Option[NerFile]
     ): F[Option[Path]] =
@@ -95,7 +95,7 @@ object RegexNerFile {
             ) *> cur.pure[F]
           else
             logger.debug(
-              s"There have been state changes for collective '${collective.id}'. Reload NER file."
+              s"There have been state changes for collective '${collective.value}'. Reload NER file."
             ) *> createFile(lup, collective, now)
         nerf.map(_.nerFilePath(cfg.directory).some)
       case None =>
@@ -119,7 +119,7 @@ object RegexNerFile {

     private def createFile(
         lastUpdate: Timestamp,
-        collective: Ident,
+        collective: CollectiveId,
         now: Timestamp
     ): F[NerFile] = {
       def update(nf: NerFile, text: String): F[Unit] =
@@ -127,7 +127,7 @@ object RegexNerFile {
         for {
           jsonFile <- Sync[F].pure(nf.jsonFilePath(cfg.directory))
           _ <- logger.debug(
-            s"Writing custom NER file for collective '${collective.id}'"
+            s"Writing custom NER file for collective '${collective.value}'"
           )
           _ <- jsonFile.parent match {
             case Some(p) => File.mkDir(p)
@@ -139,7 +139,9 @@ object RegexNerFile {
           )

       for {
-        _ <- logger.info(s"Generating custom NER file for collective '${collective.id}'")
+        _ <- logger.info(
+          s"Generating custom NER file for collective '${collective.value}'"
+        )
         names <- store.transact(QCollective.allNames(collective, cfg.maxEntries))
         nerFile = NerFile(collective, lastUpdate, now)
         _ <- update(nerFile, NerFile.mkNerConfig(names))
@@ -152,8 +154,8 @@ object RegexNerFile {
     import docspell.store.qb.DSL._
     import docspell.store.qb._

-    def latestUpdate(collective: Ident): ConnectionIO[Option[Timestamp]] = {
-      def max_(col: Column[_], cidCol: Column[Ident]): Select =
+    def latestUpdate(collective: CollectiveId): ConnectionIO[Option[Timestamp]] = {
+      def max_(col: Column[_], cidCol: Column[CollectiveId]): Select =
         Select(max(col).as("t"), from(col.table), cidCol === collective)

       val sql = union(
@@ -35,7 +35,7 @@ object DownloadZipTask {
   ): Task[F, Args, Result] =
     Task { ctx =>
       val req = ctx.args.req
-      val query = req.toQuery(ctx.args.accountId)
+      val query = req.toQuery(ctx.args.account)

       val allFiles =
         Stream
@@ -53,7 +53,7 @@ object DownloadZipTask {
           .through(Zip[F](ctx.logger.some).zip(chunkSize))
           .through(
             store.fileRepo.save(
-              ctx.args.accountId.collective,
+              ctx.args.account.collectiveId,
               FileCategory.DownloadAll,
               MimeTypeHint.advertised("application/zip")
             )
@@ -61,10 +61,10 @@ object DownloadZipTask {

       for {
         _ <- ctx.logger.info(s"Start zipping ${req.itemQueryString}")
-        summary <- downloadOps.getSummary(ctx.args.accountId, req)
+        summary <- downloadOps.getSummary(ctx.args.account, req)
         _ <- ctx.logger.debug(s"Summary: $summary")
         file <- storeZipFile.compile.lastOrError
-        row <- createRow(summary, ctx.args.accountId.collective, file)
+        row <- createRow(summary, ctx.args.account.collectiveId, file)
         _ <- ctx.logger.debug(s"Inserting zip file: $row")
         _ <- store.transact(RDownloadQuery.insert(row))
       } yield Result(summary.fileCount)
@@ -92,7 +92,7 @@ object DownloadZipTask {

   def createRow[F[_]: Sync](
       summary: DownloadSummary,
-      cid: Ident,
+      cid: CollectiveId,
       file: FileKey
   ): F[RDownloadQuery] =
     Timestamp.current[F].map { now =>
@@ -68,7 +68,7 @@ object EmptyTrashTask {
       .foldMonoid

   private def deleteChunk[F[_]: Async](
-      collective: Ident,
+      collective: CollectiveId,
       itemOps: OItem[F],
       ctx: Context[F, _]
   )(chunk: Vector[RItem]): F[Int] =
@@ -74,10 +74,10 @@ object FtsWork {
     def log[F[_]](f: Logger[F] => F[Unit]): FtsWork[F] =
       FtsWork(ctx => f(ctx.logger))

-    def clearIndex[F[_]: FlatMap](coll: Option[Ident]): FtsWork[F] =
+    def clearIndex[F[_]: FlatMap](coll: Option[CollectiveId]): FtsWork[F] =
       coll match {
         case Some(cid) =>
-          log[F](_.debug(s"Clearing index data for collective '${cid.id}'")) ++ FtsWork(
+          log[F](_.debug(s"Clearing index data for collective '${cid.value}'")) ++ FtsWork(
            ctx => ctx.fts.clear(ctx.logger, cid)
          )
         case None =>
@@ -86,7 +86,7 @@ object FtsWork {
          )
       }

-    def insertAll[F[_]: FlatMap](coll: Option[Ident]): FtsWork[F] =
+    def insertAll[F[_]: FlatMap](coll: Option[CollectiveId]): FtsWork[F] =
       log[F](_.info("Inserting all data to index")) ++ FtsWork
         .all(
           FtsWork(ctx =>
@@ -8,11 +8,11 @@ package docspell.joex.fts

 import cats.effect._
 import cats.implicits._

 import docspell.backend.fulltext.CreateIndex
 import docspell.common._
 import docspell.ftsclient._
 import docspell.joex.Config
+import docspell.scheduler.usertask.UserTaskScope
 import docspell.scheduler.{Job, Task}
 import docspell.store.Store

@@ -43,10 +43,9 @@ object MigrationTask {
       Job
         .createNew(
           taskName,
-          DocspellSystem.taskGroup,
+          UserTaskScope.system,
           (),
           "Create full-text index",
-          DocspellSystem.taskGroup,
           Priority.Low,
           Some(DocspellSystem.migrationTaskTracker)
         )
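System-level jobs such as the migration task above now pass a `UserTaskScope` instead of a raw task group and submitter. Below is a rough mock of the scope shapes implied by the call sites in this diff (`UserTaskScope.system`, `UserTaskScope.collective(cid)`, `UserTaskScope(cid, submitter.some)`); the actual implementation in docspell.scheduler.usertask may differ:

// Mock approximation of the UserTaskScope call shapes seen in this diff; not docspell's real code.
final case class Ident(id: String)
final case class CollectiveId(value: Long)

final case class UserTaskScope(collective: Option[CollectiveId], submitter: Option[Ident])

object UserTaskScope {
  // jobs owned by the system itself
  val system: UserTaskScope = UserTaskScope(None, None)

  // jobs owned by a collective without a specific submitting user
  def collective(cid: CollectiveId): UserTaskScope = UserTaskScope(Some(cid), None)

  // jobs submitted on behalf of a user within a collective
  def apply(cid: CollectiveId, submitter: Option[Ident]): UserTaskScope =
    UserTaskScope(Some(cid), submitter)
}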
@@ -40,7 +40,7 @@ object ReIndexTask {
   def onCancel[F[_]]: Task[F, Args, Unit] =
     Task.log[F, Args](_.warn("Cancelling full-text re-index task"))

-  private def clearData[F[_]: Async](collective: Option[Ident]): FtsWork[F] =
+  private def clearData[F[_]: Async](collective: Option[CollectiveId]): FtsWork[F] =
     FtsWork.log[F](_.info("Clearing index data")) ++
       (collective match {
         case Some(_) =>
@@ -8,10 +8,8 @@ package docspell.joex.learn

 import cats.data.NonEmptyList
 import cats.implicits._
-import docspell.common.Ident
+import docspell.common.CollectiveId
 import docspell.store.records.{RClassifierModel, RClassifierSetting}

 import doobie._

 final class ClassifierName(val name: String) extends AnyVal
@@ -37,12 +35,12 @@ object ClassifierName {
   val correspondentPerson: ClassifierName =
     apply("correspondentperson")

-  def findTagClassifiers[F[_]](coll: Ident): ConnectionIO[List[ClassifierName]] =
+  def findTagClassifiers(coll: CollectiveId): ConnectionIO[List[ClassifierName]] =
     for {
       categories <- RClassifierSetting.getActiveCategories(coll)
     } yield categories.map(tagCategory)

-  def findTagModels[F[_]](coll: Ident): ConnectionIO[List[RClassifierModel]] =
+  def findTagModels(coll: CollectiveId): ConnectionIO[List[RClassifierModel]] =
     for {
       categories <- RClassifierSetting.getActiveCategories(coll)
       models <- NonEmptyList.fromList(categories) match {
@@ -53,7 +51,9 @@ object ClassifierName {
       }
     } yield models

-  def findOrphanTagModels[F[_]](coll: Ident): ConnectionIO[List[RClassifierModel]] =
+  def findOrphanTagModels(
+      coll: CollectiveId
+  ): ConnectionIO[List[RClassifierModel]] =
     for {
       cats <- RClassifierSetting.getActiveCategories(coll)
       allModels = RClassifierModel.findAllByQuery(coll, s"$categoryPrefix%")
@@ -26,7 +26,7 @@ object Classify {
       workingDir: Path,
       store: Store[F],
       classifier: TextClassifier[F],
-      coll: Ident,
+      coll: CollectiveId,
       text: String
   )(cname: ClassifierName): F[Option[String]] =
     (for {
@@ -48,7 +48,7 @@ object LearnClassifierTask {
         .learnAll(
           analyser,
           store,
-          ctx.args.collective,
+          ctx.args.collectiveId,
           cfg.classification.itemCount,
           cfg.maxLength
         )
@@ -69,7 +69,7 @@ object LearnClassifierTask {
         _ <- OptionT.liftF(
           LearnTags
             .learnAllTagCategories(analyser, store)(
-              ctx.args.collective,
+              ctx.args.collectiveId,
               maxItems,
               cfg.maxLength
             )
@@ -82,7 +82,7 @@ object LearnClassifierTask {
         clearObsoleteTagModels(ctx, store) *>
           // when tags are deleted, categories may get removed. fix the json array
           store
-            .transact(RClassifierSetting.fixCategoryList(ctx.args.collective))
+            .transact(RClassifierSetting.fixCategoryList(ctx.args.collectiveId))
             .map(_ => ())
       }

@@ -92,7 +92,7 @@ object LearnClassifierTask {
   ): F[Unit] =
     for {
       list <- store.transact(
-        ClassifierName.findOrphanTagModels(ctx.args.collective)
+        ClassifierName.findOrphanTagModels(ctx.args.collectiveId)
       )
       _ <- ctx.logger.info(
         s"Found ${list.size} obsolete model files that are deleted now."
@@ -110,7 +110,7 @@ object LearnClassifierTask {
       cfg: Config.TextAnalysis
   ): OptionT[F, OCollective.Classifier] =
     if (cfg.classification.enabled)
-      OptionT(store.transact(RClassifierSetting.findById(ctx.args.collective)))
+      OptionT(store.transact(RClassifierSetting.findById(ctx.args.collectiveId)))
        .filter(_.autoTagEnabled)
        .map(OCollective.Classifier.fromRecord)
     else
@@ -21,7 +21,7 @@ object LearnItemEntities {
   def learnAll[F[_]: Async, A](
       analyser: TextAnalyser[F],
       store: Store[F],
-      collective: Ident,
+      collective: CollectiveId,
       maxItems: Int,
       maxTextLen: Int
   ): Task[F, A, Unit] =
@@ -35,7 +35,7 @@ object LearnItemEntities {
   def learnCorrOrg[F[_]: Async, A](
       analyser: TextAnalyser[F],
       store: Store[F],
-      collective: Ident,
+      collective: CollectiveId,
       maxItems: Int,
       maxTextLen: Int
   ): Task[F, A, Unit] =
@@ -47,7 +47,7 @@ object LearnItemEntities {
   def learnCorrPerson[F[_]: Async, A](
       analyser: TextAnalyser[F],
       store: Store[F],
-      collective: Ident,
+      collective: CollectiveId,
       maxItems: Int,
       maxTextLen: Int
   ): Task[F, A, Unit] =
@@ -59,7 +59,7 @@ object LearnItemEntities {
   def learnConcPerson[F[_]: Async, A](
       analyser: TextAnalyser[F],
       store: Store[F],
-      collective: Ident,
+      collective: CollectiveId,
       maxItems: Int,
       maxTextLen: Int
   ): Task[F, A, Unit] =
@@ -71,7 +71,7 @@ object LearnItemEntities {
   def learnConcEquip[F[_]: Async, A](
       analyser: TextAnalyser[F],
       store: Store[F],
-      collective: Ident,
+      collective: CollectiveId,
       maxItems: Int,
       maxTextLen: Int
   ): Task[F, A, Unit] =
@@ -83,7 +83,7 @@ object LearnItemEntities {
   private def learn[F[_]: Async, A](
       store: Store[F],
       analyser: TextAnalyser[F],
-      collective: Ident
+      collective: CollectiveId
   )(cname: ClassifierName, data: Context[F, _] => Stream[F, Data]): Task[F, A, Unit] =
     Task { ctx =>
       ctx.logger.info(s"Learn classifier ${cname.name}") *>
@@ -21,7 +21,7 @@ object LearnTags {
   def learnTagCategory[F[_]: Async, A](
       analyser: TextAnalyser[F],
       store: Store[F],
-      collective: Ident,
+      collective: CollectiveId,
       maxItems: Int,
       maxTextLen: Int
   )(
@@ -44,7 +44,7 @@ object LearnTags {
       }

   def learnAllTagCategories[F[_]: Async, A](analyser: TextAnalyser[F], store: Store[F])(
-      collective: Ident,
+      collective: CollectiveId,
       maxItems: Int,
       maxTextLen: Int
   ): Task[F, A, Unit] =
@@ -20,7 +20,7 @@ object SelectItems {
   val pageSep = LearnClassifierTask.pageSep
   val noClass = LearnClassifierTask.noClass

-  def forCategory[F[_]](store: Store[F], collective: Ident)(
+  def forCategory[F[_]](store: Store[F], collective: CollectiveId)(
       maxItems: Int,
       category: String,
       maxTextLen: Int
@@ -36,7 +36,7 @@ object SelectItems {

   def forCorrOrg[F[_]](
       store: Store[F],
-      collective: Ident,
+      collective: CollectiveId,
       maxItems: Int,
       maxTextLen: Int
   ): Stream[F, Data] = {
@@ -51,7 +51,7 @@ object SelectItems {

   def forCorrPerson[F[_]](
       store: Store[F],
-      collective: Ident,
+      collective: CollectiveId,
       maxItems: Int,
       maxTextLen: Int
   ): Stream[F, Data] = {
@@ -66,7 +66,7 @@ object SelectItems {

   def forConcPerson[F[_]](
       store: Store[F],
-      collective: Ident,
+      collective: CollectiveId,
       maxItems: Int,
       maxTextLen: Int
   ): Stream[F, Data] = {
@@ -81,7 +81,7 @@ object SelectItems {

   def forConcEquip[F[_]](
       store: Store[F],
-      collective: Ident,
+      collective: CollectiveId,
       maxItems: Int,
       maxTextLen: Int
   ): Stream[F, Data] = {
@@ -94,7 +94,10 @@ object SelectItems {
     store.transact(connStream)
   }

-  private def allItems(collective: Ident, max: Int): Stream[ConnectionIO, Ident] = {
+  private def allItems(
+      collective: CollectiveId,
+      max: Int
+  ): Stream[ConnectionIO, Ident] = {
     val limit = if (max <= 0) Batch.all else Batch.limit(max)
     QItem.findAllNewesFirst(collective, 10, limit)
   }
@@ -21,7 +21,7 @@ object StoreClassifierModel {
   def handleModel[F[_]: Async](
       store: Store[F],
       logger: Logger[F],
-      collective: Ident,
+      collective: CollectiveId,
       modelName: ClassifierName
   )(
       trainedModel: ClassifierModel
@@ -11,12 +11,12 @@ import cats.data.OptionT
 import cats.effect._
 import cats.implicits._
 import fs2.Stream

 import docspell.backend.JobFactory
 import docspell.common._
 import docspell.common.util.Zip
 import docspell.logging.Logger
 import docspell.scheduler._
+import docspell.scheduler.usertask.UserTaskScope
 import docspell.store.Store

 /** Task to submit multiple files at once. By default, one file in an upload results in
@@ -90,7 +90,7 @@ object MultiUploadArchiveTask {
         submitter = currentJob.map(_.submitter).getOrElse(DocspellSystem.user)
         job <- JobFactory.processItem(
           args,
-          AccountId(ctx.args.meta.collective, submitter),
+          UserTaskScope(ctx.args.meta.collective, submitter.some),
           prio,
           None
         )
@@ -53,7 +53,12 @@ object PeriodicDueItemsTask {
   def withChannel[F[_]: Sync](ctx: Context[F, Args], ops: ONotification[F])(
       cont: Vector[NotificationChannel] => F[Unit]
   ): F[Unit] =
-    TaskOperations.withChannel(ctx.logger, ctx.args.channels, ctx.args.account, ops)(cont)
+    TaskOperations.withChannel(
+      ctx.logger,
+      ctx.args.channels,
+      ctx.args.account.userId,
+      ops
+    )(cont)

   def withItems[F[_]: Sync](
       ctx: Context[F, Args],
|
|||||||
def withChannel[F[_]: Sync](ctx: Context[F, Args], ops: ONotification[F])(
|
def withChannel[F[_]: Sync](ctx: Context[F, Args], ops: ONotification[F])(
|
||||||
cont: Vector[NotificationChannel] => F[Unit]
|
cont: Vector[NotificationChannel] => F[Unit]
|
||||||
): F[Unit] =
|
): F[Unit] =
|
||||||
TaskOperations.withChannel(ctx.logger, ctx.args.channels, ctx.args.account, ops)(cont)
|
TaskOperations.withChannel(
|
||||||
|
ctx.logger,
|
||||||
|
ctx.args.channels,
|
||||||
|
ctx.args.account.userId,
|
||||||
|
ops
|
||||||
|
)(cont)
|
||||||
|
|
||||||
private def queryString(q: ItemQuery.Expr) =
|
private def queryString(q: ItemQuery.Expr) =
|
||||||
ItemQueryParser.asString(q)
|
ItemQueryParser.asString(q)
|
||||||
@@ -64,7 +69,10 @@ object PeriodicQueryTask {
   ): F[Unit] = {
     def fromBookmark(id: String) =
       store
-        .transact(RQueryBookmark.findByNameOrId(ctx.args.account, id))
+        .transact(
+          RQueryBookmark
+            .findByNameOrId(ctx.args.account.collectiveId, ctx.args.account.userId, id)
+        )
         .map(_.map(_.query))
         .flatTap(q =>
           ctx.logger.debug(s"Loaded bookmark '$id': ${q.map(_.expr).map(queryString)}")
@@ -72,7 +80,9 @@ object PeriodicQueryTask {

     def fromShare(id: String) =
       store
-        .transact(RShare.findOneByCollective(ctx.args.account.collective, Some(true), id))
+        .transact(
+          RShare.findOneByCollective(ctx.args.account.collectiveId, Some(true), id)
+        )
         .map(_.map(_.query))
         .flatTap(q =>
           ctx.logger.debug(s"Loaded share '$id': ${q.map(_.expr).map(queryString)}")
@@ -25,24 +25,24 @@ trait TaskOperations {
   def withChannel[F[_]: Sync](
       logger: Logger[F],
       channelsIn: NonEmptyList[ChannelRef],
-      accountId: AccountId,
+      userId: Ident,
       ops: ONotification[F]
   )(
       cont: Vector[NotificationChannel] => F[Unit]
   ): F[Unit] = {
     val channels =
-      channelsIn.toList.toVector.flatTraverse(ops.findNotificationChannel(_, accountId))
+      channelsIn.toList.toVector.flatTraverse(ops.findNotificationChannel(_, userId))

     channels.flatMap { ch =>
       if (ch.isEmpty)
-        logger.error(s"No channels found for the given data: ${channelsIn}")
+        logger.error(s"No channels found for the given data: $channelsIn")
       else cont(ch)
     }
   }

   def withEventContext[F[_]](
       logger: Logger[F],
-      account: AccountId,
+      account: AccountInfo,
       baseUrl: Option[LenientUri],
       items: Vector[ListItem],
       contentStart: Option[String],
@@ -9,10 +9,10 @@ package docspell.joex.pagecount
 import cats.effect._
 import cats.implicits._
 import fs2.{Chunk, Stream}

 import docspell.backend.JobFactory
 import docspell.common._
 import docspell.scheduler._
+import docspell.scheduler.usertask.UserTaskScope
 import docspell.store.Store
 import docspell.store.records.RAttachment

@@ -51,12 +51,12 @@ object AllPageCountTask {
       .compile
       .foldMonoid

-  private def findAttachments[F[_]] =
+  private def findAttachments =
     RAttachment.findAllWithoutPageCount(50)

   private def createJobs[F[_]: Sync](ras: Chunk[RAttachment]): Stream[F, Job[String]] = {
     def mkJob(ra: RAttachment): F[Job[MakePageCountArgs]] =
-      JobFactory.makePageCount(MakePageCountArgs(ra.id), None)
+      JobFactory.makePageCount(MakePageCountArgs(ra.id), UserTaskScope.system)

     val jobs = ras.traverse(mkJob)
     Stream.evalUnChunk(jobs).map(_.encode)
@@ -66,10 +66,9 @@ object AllPageCountTask {
     Job
       .createNew(
         AllPageCountTask.taskName,
-        DocspellSystem.taskGroup,
+        UserTaskScope.system,
         (),
         "Create all page-counts",
-        DocspellSystem.taskGroup,
         Priority.Low,
         Some(DocspellSystem.allPageCountTaskTracker)
       )
@@ -9,9 +9,9 @@ package docspell.joex.pdfconv
 import cats.effect._
 import cats.implicits._
 import fs2.{Chunk, Stream}

 import docspell.common._
 import docspell.scheduler._
+import docspell.scheduler.usertask.UserTaskScope
 import docspell.store.Store
 import docspell.store.records.RAttachment

@@ -55,7 +55,10 @@ object ConvertAllPdfTask {
   private def createJobs[F[_]: Sync](
       ctx: Context[F, Args]
   )(ras: Chunk[RAttachment]): Stream[F, Job[String]] = {
-    val collectiveOrSystem = ctx.args.collective.getOrElse(DocspellSystem.taskGroup)
+    val collectiveOrSystem =
+      ctx.args.collective
+        .map(UserTaskScope.collective)
+        .getOrElse(UserTaskScope.system)

     def mkJob(ra: RAttachment): F[Job[PdfConvTask.Args]] =
       Job.createNew(
@@ -63,7 +66,6 @@ object ConvertAllPdfTask {
         collectiveOrSystem,
         PdfConvTask.Args(ra.id),
         s"Convert pdf ${ra.id.id}/${ra.name.getOrElse("-")}",
-        collectiveOrSystem,
         Priority.Low,
         Some(PdfConvTask.taskName / ra.id)
       )
@@ -9,11 +9,11 @@ package docspell.joex.preview
 import cats.effect._
 import cats.implicits._
 import fs2.{Chunk, Stream}

 import docspell.backend.JobFactory
 import docspell.common.MakePreviewArgs.StoreMode
 import docspell.common._
 import docspell.scheduler._
+import docspell.scheduler.usertask.UserTaskScope
 import docspell.store.Store
 import docspell.store.records.RAttachment

@@ -64,15 +64,12 @@ object AllPreviewsTask {
   private def createJobs[F[_]: Sync](
       ctx: Context[F, Args]
   )(ras: Chunk[RAttachment]): Stream[F, Job[MakePreviewArgs]] = {
-    val collectiveOrSystem = {
-      val cid = ctx.args.collective.getOrElse(DocspellSystem.taskGroup)
-      AccountId(cid, DocspellSystem.user)
-    }
-
     def mkJob(ra: RAttachment): F[Job[MakePreviewArgs]] =
       JobFactory.makePreview(
         MakePreviewArgs(ra.id, ctx.args.storeMode),
-        collectiveOrSystem.some
+        ctx.args.collective
+          .map(UserTaskScope.collective)
+          .getOrElse(UserTaskScope.system)
       )

     val jobs = ras.traverse(mkJob)
@@ -81,8 +78,9 @@ object AllPreviewsTask {

   def job[F[_]: Sync](
       storeMode: MakePreviewArgs.StoreMode,
-      cid: Option[Ident]
+      cid: Option[CollectiveId]
   ): F[Job[String]] =
-    JobFactory.allPreviews(AllPreviewsArgs(cid, storeMode), None).map(_.encode)
+    JobFactory
+      .allPreviews(AllPreviewsArgs(cid, storeMode), UserTaskScope.system)
+      .map(_.encode)
 }
@@ -75,7 +75,7 @@ object AttachmentPreview {

   private def createRecord[F[_]: Sync](
       store: Store[F],
-      collective: Ident,
+      collective: CollectiveId,
       png: Stream[F, Byte],
       ra: RAttachment
   ): F[RAttachmentPreview] = {
@@ -81,7 +81,7 @@ object TextAnalysis {
         labels <- analyser.annotate(
           ctx.logger,
           sett,
-          ctx.args.meta.collective,
+          ctx.args.meta.collective.valueAsIdent,
           rm.content.getOrElse("")
         )
       } yield (rm.copy(nerlabels = labels.all.toList), AttachmentDates(rm, labels.dates))
@@ -50,7 +50,7 @@ object TextExtraction {
           None,
           ctx.args.meta.language
         )
-      _ <- fts.indexData(ctx.logger, (idxItem +: txt.map(_.td)): _*)
+      _ <- fts.indexData(ctx.logger, idxItem +: txt.map(_.td): _*)
       dur <- start
       extractedTags = txt.flatMap(_.tags).distinct.toList
       _ <- ctx.logger.info(s"Text extraction finished in ${dur.formatExact}.")
@@ -71,7 +71,7 @@ object TextExtraction {
       store: Store[F],
       cfg: ExtractConfig,
       lang: Language,
-      collective: Ident,
+      collective: CollectiveId,
       item: ItemData
   )(ra: RAttachment): F[Result] = {
     def makeTextData(pair: (RAttachmentMeta, List[String])): Result =
@@ -42,15 +42,15 @@ object ScanMailboxTask {
     Task { ctx =>
       for {
         _ <- ctx.logger.info(
-          s"=== Start importing mails for user ${ctx.args.account.user.id}"
+          s"=== Start importing mails for user ${ctx.args.account.login.id}"
         )
         _ <- ctx.logger.debug(s"Settings: ${ctx.args.asJson.noSpaces}")
         mailCfg <- getMailSettings(ctx, store)
         folders = ctx.args.folders.mkString(", ")
-        userId = ctx.args.account.user
+        login = ctx.args.account.login
         imapConn = ctx.args.imapConnection
         _ <- ctx.logger.info(
-          s"Reading mails for user ${userId.id} from ${imapConn.id}/$folders"
+          s"Reading mails for user ${login.id} from ${imapConn.id}/$folders"
         )
         _ <- importMails(cfg, mailCfg, emil, upload, joex, ctx, store)
       } yield ()
@@ -61,7 +61,7 @@ object ScanMailboxTask {

   def getMailSettings[F[_]: Sync](ctx: Context[F, Args], store: Store[F]): F[RUserImap] =
     store
-      .transact(RUserImap.getByName(ctx.args.account, ctx.args.imapConnection))
+      .transact(RUserImap.getByName(ctx.args.account.userId, ctx.args.imapConnection))
       .flatMap {
         case Some(c) => c.pure[F]
         case None =>
@@ -234,13 +234,13 @@ object ScanMailboxTask {
         ctx.logger.debug("Not matching on subjects. No filter given") *> headers.pure[F]
       }

-    def filterMessageIds[C](headers: Vector[MailHeaderItem]): F[Vector[MailHeaderItem]] =
+    def filterMessageIds(headers: Vector[MailHeaderItem]): F[Vector[MailHeaderItem]] =
       NonEmptyList.fromFoldable(headers.flatMap(_.mh.messageId)) match {
         case Some(nl) =>
           for {
             archives <- store.transact(
               RAttachmentArchive
-                .findByMessageIdAndCollective(nl, ctx.args.account.collective)
+                .findByMessageIdAndCollective(nl, ctx.args.account.collectiveId)
             )
             existing = archives.flatMap(_.messageId).toSet
             mails <- headers
@@ -265,7 +265,7 @@ object ScanMailboxTask {
           store.transact(
             QOrganization
               .findPersonByContact(
-                ctx.args.account.collective,
+                ctx.args.account.collectiveId,
                 from.address,
                 Some(ContactKind.Email),
                 Some(NonEmptyList.of(PersonUse.concerning))
@@ -320,7 +320,7 @@ object ScanMailboxTask {
         dir <- getDirection(mail.header)
         meta = OUpload.UploadMeta(
           Some(dir),
-          s"mailbox-${ctx.args.account.user.id}",
+          s"mailbox-${ctx.args.account.login.id}",
           args.itemFolder,
           Seq.empty,
           true,
@@ -337,7 +337,12 @@ object ScanMailboxTask {
           priority = Priority.Low,
           tracker = None
         )
-        res <- upload.submit(data, ctx.args.account, None)
+        res <- upload.submit(
+          data,
+          ctx.args.account.collectiveId,
+          ctx.args.account.userId.some,
+          None
+        )
       } yield res
     }

@@ -9,13 +9,11 @@ package docspell.joex.updatecheck
 import cats.data.OptionT
 import cats.effect._
 import cats.implicits._

 import docspell.common._
 import docspell.scheduler.Task
 import docspell.scheduler.usertask.UserTask
 import docspell.store.Store
-import docspell.store.records.RUserEmail
+import docspell.store.records.{RUser, RUserEmail}

 import emil._

 object UpdateCheckTask {
@@ -83,7 +81,9 @@ object UpdateCheckTask {
       store: Store[F],
       cfg: UpdateCheckConfig
   ): F[RUserEmail] =
-    OptionT(store.transact(RUserEmail.getByName(cfg.senderAccount, cfg.smtpId)))
+    OptionT(store.transact(RUser.findByAccount(cfg.senderAccount)))
+      .map(_.uid)
+      .flatMap(uid => OptionT(store.transact(RUserEmail.getByName(uid, cfg.smtpId))))
       .getOrElseF(
         Sync[F].raiseError(
           new Exception(
@@ -18,7 +18,7 @@ import munit._
 class TagsChangedCtxTest extends FunSuite {

   val url = LenientUri.unsafe("http://test")
-  val account = AccountId(id("user2"), id("user2"))
+  val account = AccountInfo(CollectiveId(1), id("user2"), id("user-abc-def"), id("user2"))
   val tag = Tag(id("a-b-1"), "tag-red", Some("doctype"))
   val item = Item(
     id = id("item-1"),
@@ -0,0 +1,58 @@
+/*
+ * Copyright 2020 Eike K. & Contributors
+ *
+ * SPDX-License-Identifier: AGPL-3.0-or-later
+ */
+
+package db.migration.data
+
+import db.migration.data.DownloadZipArgs.DownloadRequest
+import docspell.common._
+import docspell.query.ItemQuery.Expr.ValidItemStates
+import docspell.query.{ItemQuery, ItemQueryParser}
+import docspell.store.queries.Query
+import io.circe.generic.semiauto.{deriveDecoder, deriveEncoder}
+import io.circe.{Decoder, Encoder}
+
+/** @deprecated replaced with a version using `AccountInfo` */
+final case class DownloadZipArgs(accountId: AccountId, req: DownloadRequest)
+
+object DownloadZipArgs {
+  val taskName: Ident = Ident.unsafe("download-query-zip")
+
+  final case class DownloadRequest(
+      query: ItemQuery,
+      fileType: DownloadAllType,
+      maxFiles: Int,
+      maxSize: ByteSize
+  ) {
+    def toQuery(account: AccountInfo): Query =
+      Query
+        .all(account)
+        .withFix(_.andQuery(ValidItemStates))
+        .withCond(_ => Query.QueryExpr(query.expr))
+
+    def itemQueryString =
+      ItemQueryParser.asString(query.expr)
+  }
+  object DownloadRequest {
+    implicit val itemQueryDecoder: Decoder[ItemQuery] =
+      Decoder.decodeString.emap(str => ItemQueryParser.parse(str).left.map(_.render))
+
+    implicit val itemQueryEncoder: Encoder[ItemQuery] =
+      Encoder.encodeString.contramap(q =>
+        q.raw.getOrElse(ItemQueryParser.unsafeAsString(q.expr))
+      )
+
+    implicit val jsonDecoder: Decoder[DownloadRequest] =
+      deriveDecoder
+
+    implicit val jsonEncoder: Encoder[DownloadRequest] =
+      deriveEncoder
+  }
+
+  implicit val jsonEncoder: Encoder[DownloadZipArgs] =
+    deriveEncoder
+  implicit val jsonDecoder: Decoder[DownloadZipArgs] =
+    deriveDecoder
+}
@@ -202,7 +202,7 @@ object RNotificationChannel {
             s"Looking up user smtp for ${userId.id} and ${conn.id}"
           )
         )
-        mailConn <- OptionT(RUserEmail.getByUser(userId, conn))
+        mailConn <- OptionT(RUserEmail.getByName(userId, conn))
         rec = RNotificationChannelMail(
           id,
           userId,
@@ -175,13 +175,6 @@ object RUserEmail {
     run(select(t.all), from(t), t.uid === userId).query[RUserEmail].to[Vector]
   }

-  def getByUser(userId: Ident, name: Ident): ConnectionIO[Option[RUserEmail]] = {
-    val t = Table(None)
-    run(select(t.all), from(t), t.uid === userId && t.name === name)
-      .query[RUserEmail]
-      .option
-  }
-
   private def findByAccount0(
       userId: Ident,
       nameQ: Option[String],
@@ -57,10 +57,10 @@ class TempFtsOpsTest extends DatabaseTest {

   def prepareItems(store: Store[IO]) =
     for {
-      _ <- store.transact(RCollective.insert(makeCollective(CollectiveId(2))))
-      _ <- store.transact(RUser.insert(makeUser(CollectiveId(2))))
+      cid <- store.transact(RCollective.insert(makeCollective))
+      _ <- store.transact(RUser.insert(makeUser(cid)))
       items = (0 until 200)
-        .map(makeItem(_, CollectiveId(2)))
+        .map(makeItem(_, cid))
         .toList
       _ <- items.traverse(i => store.transact(RItem.insert(i)))
     } yield ()
@@ -184,9 +184,9 @@ class TempFtsOpsTest extends DatabaseTest {
       Timestamp(Instant.now)
     )

-  def makeCollective(cid: CollectiveId): RCollective =
+  def makeCollective: RCollective =
     RCollective(
-      cid,
+      CollectiveId.unknown,
       DocspellSystem.account.collective,
       CollectiveState.Active,
       Language.English,