	Adopt joex to new collective-id
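Joex now identifies a collective by its numeric CollectiveId instead of the name-based Ident. Task arguments that carried an AccountId (collective name plus user name) switch to the richer AccountInfo, jobs are created and submitted through UserTaskScope, and user-bound lookups (IMAP/SMTP settings, notification channels, bookmarks) key on the user id rather than the account name.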
		| @@ -86,7 +86,7 @@ object ODownloadAll { | ||||
|       ): F[DownloadSummary] = for { | ||||
|         _ <- logger.info(s"Download all request: $req") | ||||
|         summary <- getSummary(account, req) | ||||
| -       args = DownloadZipArgs(account.asAccountId, req) | ||||
| +       args = DownloadZipArgs(account, req) | ||||
|         _ <- OptionT | ||||
|           .whenF(summary.state == DownloadState.NotPresent) { | ||||
|             JobFactory | ||||
|   | ||||
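The AccountInfo type that replaces AccountId in these task args is not shown in this diff. From the accessors this commit uses (collectiveId, userId, login, asAccountId) and the fixture in TagsChangedCtxTest further down, its shape is plausibly the following sketch (field order and types are inferred, not authoritative):

    final case class AccountInfo(
        collectiveId: CollectiveId, // numeric key introduced by this migration
        collective: Ident,          // collective name, the old group key
        userId: Ident,              // surrogate user id
        login: Ident                // login name
    ) {
      // Bridge for code that still expects the old name-based pair
      def asAccountId: AccountId = AccountId(collective, login)
    }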
| @@ -8,11 +8,10 @@ package docspell.backend.task | ||||
|  | ||||
| import docspell.backend.ops.ODownloadAll.model.DownloadRequest | ||||
| import docspell.common._ | ||||
|  | ||||
| import io.circe.generic.semiauto.{deriveDecoder, deriveEncoder} | ||||
| import io.circe.{Decoder, Encoder} | ||||
|  | ||||
| - final case class DownloadZipArgs(accountId: AccountId, req: DownloadRequest) | ||||
| + final case class DownloadZipArgs(account: AccountInfo, req: DownloadRequest) | ||||
|  | ||||
| object DownloadZipArgs { | ||||
|   val taskName: Ident = Ident.unsafe("download-query-zip") | ||||
|   | ||||
| @@ -84,7 +84,7 @@ final class JoexAppImpl[F[_]: Async]( | ||||
|       .evalMap { es => | ||||
|         val args = EmptyTrashArgs(es.cid, es.minAge) | ||||
|         uts.updateOneTask( | ||||
| -          UserTaskScope(args.collective), | ||||
| +          UserTaskScope.collective(args.collective), | ||||
|           args.makeSubject.some, | ||||
|           EmptyTrashTask.userTask(args, es.schedule) | ||||
|         ) | ||||
|   | ||||
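The EmptyTrash hunk swaps the bare UserTaskScope(args.collective) constructor for the named UserTaskScope.collective. A minimal sketch of such a scope type, assuming a collective/user/system split (the actual definition lives in docspell.scheduler.usertask and may differ):

    sealed trait UserTaskScope
    object UserTaskScope {
      final case class Collective(id: CollectiveId) extends UserTaskScope
      final case class Account(id: CollectiveId, user: Ident) extends UserTaskScope
      case object System extends UserTaskScope

      def collective(id: CollectiveId): UserTaskScope = Collective(id)
      val system: UserTaskScope = System
      // Matches the MultiUploadArchiveTask call site below: optional submitter
      def apply(id: CollectiveId, user: Option[Ident]): UserTaskScope =
        user.map(Account(id, _)).getOrElse(Collective(id))
    }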
| @@ -49,7 +49,7 @@ object GenericItemAddonTask extends LoggerExtension { | ||||
|       trigger: AddonTriggerType, | ||||
|       addonTaskIds: Set[Ident] | ||||
|   )( | ||||
| -      collective: Ident, | ||||
| +      collective: CollectiveId, | ||||
|       data: ItemData, | ||||
|       maybeMeta: Option[ProcessItemArgs.ProcessMeta] | ||||
|   ): Task[F, Unit, ItemData] = | ||||
| @@ -63,7 +63,7 @@ object GenericItemAddonTask extends LoggerExtension { | ||||
|       trigger: AddonTriggerType, | ||||
|       addonTaskIds: Set[Ident] | ||||
|   )( | ||||
| -      collective: Ident, | ||||
| +      collective: CollectiveId, | ||||
|       data: ItemData, | ||||
|       maybeMeta: Option[ProcessItemArgs.ProcessMeta] | ||||
|   ): Task[F, Unit, ExecResult] = | ||||
|   | ||||
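From here on, collective: Ident becomes collective: CollectiveId, and log messages interpolate collective.value instead of collective.id. A plausible shape for the id type, assuming a Long-backed value class (the width is an assumption; valueAsIdent and unknown both appear later in this diff):

    final case class CollectiveId(value: Long) extends AnyVal {
      // Bridge for APIs that still want an Ident, e.g. analyser.annotate below
      def valueAsIdent: Ident = Ident.unsafe(value.toString)
    }

    object CollectiveId {
      // Placeholder for records whose id the database generates on insert
      val unknown: CollectiveId = CollectiveId(-1)
    }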
| @@ -18,7 +18,7 @@ import docspell.store.queries.QCollective | ||||
| import io.circe.generic.semiauto._ | ||||
| import io.circe.{Decoder, Encoder} | ||||
|  | ||||
| - case class NerFile(collective: Ident, updated: Timestamp, creation: Timestamp) { | ||||
| + case class NerFile(collective: CollectiveId, updated: Timestamp, creation: Timestamp) { | ||||
|   def nerFilePath(directory: Path): Path = | ||||
|     NerFile.nerFilePath(directory, collective) | ||||
|  | ||||
| @@ -33,14 +33,14 @@ object NerFile { | ||||
|   implicit val jsonEncoder: Encoder[NerFile] = | ||||
|     deriveEncoder[NerFile] | ||||
|  | ||||
| -   private def nerFilePath(directory: Path, collective: Ident): Path = | ||||
| -     directory.resolve(s"${collective.id}.txt") | ||||
| +   private def nerFilePath(directory: Path, collective: CollectiveId): Path = | ||||
| +     directory.resolve(s"${collective.value}.txt") | ||||
|  | ||||
| -   private def jsonFilePath(directory: Path, collective: Ident): Path = | ||||
| -     directory.resolve(s"${collective.id}.json") | ||||
| +   private def jsonFilePath(directory: Path, collective: CollectiveId): Path = | ||||
| +     directory.resolve(s"${collective.value}.json") | ||||
|  | ||||
|   def find[F[_]: Async]( | ||||
| -      collective: Ident, | ||||
| +      collective: CollectiveId, | ||||
|       directory: Path | ||||
|   ): F[Option[NerFile]] = { | ||||
|     val file = jsonFilePath(directory, collective) | ||||
|   | ||||
| @@ -24,7 +24,7 @@ import io.circe.syntax._ | ||||
| /** Maintains a custom regex-ner file per collective for stanford's regexner annotator. */ | ||||
| trait RegexNerFile[F[_]] { | ||||
|  | ||||
| -   def makeFile(collective: Ident): F[Option[Path]] | ||||
| +   def makeFile(collective: CollectiveId): F[Option[Path]] | ||||
|  | ||||
| } | ||||
|  | ||||
| @@ -49,11 +49,11 @@ object RegexNerFile { | ||||
|  | ||||
|     private[this] val logger = docspell.logging.getLogger[F] | ||||
|  | ||||
| -     def makeFile(collective: Ident): F[Option[Path]] = | ||||
| +     def makeFile(collective: CollectiveId): F[Option[Path]] = | ||||
|       if (cfg.maxEntries > 0) doMakeFile(collective) | ||||
|       else (None: Option[Path]).pure[F] | ||||
|  | ||||
| -     def doMakeFile(collective: Ident): F[Option[Path]] = | ||||
| +     def doMakeFile(collective: CollectiveId): F[Option[Path]] = | ||||
|       for { | ||||
|         now <- Timestamp.current[F] | ||||
|         existing <- NerFile.find[F](collective, cfg.directory) | ||||
| @@ -75,7 +75,7 @@ object RegexNerFile { | ||||
|       } yield result | ||||
|  | ||||
|     private def updateFile( | ||||
| -        collective: Ident, | ||||
| +        collective: CollectiveId, | ||||
|         now: Timestamp, | ||||
|         current: Option[NerFile] | ||||
|     ): F[Option[Path]] = | ||||
| @@ -95,7 +95,7 @@ object RegexNerFile { | ||||
|                     ) *> cur.pure[F] | ||||
|                   else | ||||
|                     logger.debug( | ||||
| -                      s"There have been state changes for collective '${collective.id}'. Reload NER file." | ||||
| +                      s"There have been state changes for collective '${collective.value}'. Reload NER file." | ||||
|                     ) *> createFile(lup, collective, now) | ||||
|                 nerf.map(_.nerFilePath(cfg.directory).some) | ||||
|               case None => | ||||
| @@ -119,7 +119,7 @@ object RegexNerFile { | ||||
|  | ||||
|     private def createFile( | ||||
|         lastUpdate: Timestamp, | ||||
| -        collective: Ident, | ||||
| +        collective: CollectiveId, | ||||
|         now: Timestamp | ||||
|     ): F[NerFile] = { | ||||
|       def update(nf: NerFile, text: String): F[Unit] = | ||||
| @@ -127,7 +127,7 @@ object RegexNerFile { | ||||
|           for { | ||||
|             jsonFile <- Sync[F].pure(nf.jsonFilePath(cfg.directory)) | ||||
|             _ <- logger.debug( | ||||
| -              s"Writing custom NER file for collective '${collective.id}'" | ||||
| +              s"Writing custom NER file for collective '${collective.value}'" | ||||
|             ) | ||||
|             _ <- jsonFile.parent match { | ||||
|               case Some(p) => File.mkDir(p) | ||||
| @@ -139,7 +139,9 @@ object RegexNerFile { | ||||
|         ) | ||||
|  | ||||
|       for { | ||||
| -        _ <- logger.info(s"Generating custom NER file for collective '${collective.id}'") | ||||
| +        _ <- logger.info( | ||||
| +          s"Generating custom NER file for collective '${collective.value}'" | ||||
| +        ) | ||||
|         names <- store.transact(QCollective.allNames(collective, cfg.maxEntries)) | ||||
|         nerFile = NerFile(collective, lastUpdate, now) | ||||
|         _ <- update(nerFile, NerFile.mkNerConfig(names)) | ||||
| @@ -152,8 +154,8 @@ object RegexNerFile { | ||||
|     import docspell.store.qb.DSL._ | ||||
|     import docspell.store.qb._ | ||||
|  | ||||
| -     def latestUpdate(collective: Ident): ConnectionIO[Option[Timestamp]] = { | ||||
| -       def max_(col: Column[_], cidCol: Column[Ident]): Select = | ||||
| +     def latestUpdate(collective: CollectiveId): ConnectionIO[Option[Timestamp]] = { | ||||
| +       def max_(col: Column[_], cidCol: Column[CollectiveId]): Select = | ||||
|         Select(max(col).as("t"), from(col.table), cidCol === collective) | ||||
|  | ||||
|       val sql = union( | ||||
|   | ||||
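The typed column Column[CollectiveId] and the comparison cidCol === collective only compile if doobie can map the new type. A sketch of the instance this presupposes, assuming the Long-backed representation above:

    import doobie.Meta

    implicit val collectiveIdMeta: Meta[CollectiveId] =
      Meta[Long].timap(CollectiveId.apply)(_.value)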
| @@ -35,7 +35,7 @@ object DownloadZipTask { | ||||
|   ): Task[F, Args, Result] = | ||||
|     Task { ctx => | ||||
|       val req = ctx.args.req | ||||
| -       val query = req.toQuery(ctx.args.accountId) | ||||
| +       val query = req.toQuery(ctx.args.account) | ||||
|  | ||||
|       val allFiles = | ||||
|         Stream | ||||
| @@ -53,7 +53,7 @@ object DownloadZipTask { | ||||
|           .through(Zip[F](ctx.logger.some).zip(chunkSize)) | ||||
|           .through( | ||||
|             store.fileRepo.save( | ||||
| -              ctx.args.accountId.collective, | ||||
| +              ctx.args.account.collectiveId, | ||||
|               FileCategory.DownloadAll, | ||||
|               MimeTypeHint.advertised("application/zip") | ||||
|             ) | ||||
| @@ -61,10 +61,10 @@ object DownloadZipTask { | ||||
|  | ||||
|       for { | ||||
|         _ <- ctx.logger.info(s"Start zipping ${req.itemQueryString}") | ||||
| -        summary <- downloadOps.getSummary(ctx.args.accountId, req) | ||||
| +        summary <- downloadOps.getSummary(ctx.args.account, req) | ||||
|         _ <- ctx.logger.debug(s"Summary: $summary") | ||||
|         file <- storeZipFile.compile.lastOrError | ||||
| -        row <- createRow(summary, ctx.args.accountId.collective, file) | ||||
| +        row <- createRow(summary, ctx.args.account.collectiveId, file) | ||||
|         _ <- ctx.logger.debug(s"Inserting zip file: $row") | ||||
|         _ <- store.transact(RDownloadQuery.insert(row)) | ||||
|       } yield Result(summary.fileCount) | ||||
| @@ -92,7 +92,7 @@ object DownloadZipTask { | ||||
|  | ||||
|   def createRow[F[_]: Sync]( | ||||
|       summary: DownloadSummary, | ||||
| -      cid: Ident, | ||||
| +      cid: CollectiveId, | ||||
|       file: FileKey | ||||
|   ): F[RDownloadQuery] = | ||||
|     Timestamp.current[F].map { now => | ||||
|   | ||||
| @@ -68,7 +68,7 @@ object EmptyTrashTask { | ||||
|       .foldMonoid | ||||
|  | ||||
|   private def deleteChunk[F[_]: Async]( | ||||
| -      collective: Ident, | ||||
| +      collective: CollectiveId, | ||||
|       itemOps: OItem[F], | ||||
|       ctx: Context[F, _] | ||||
|   )(chunk: Vector[RItem]): F[Int] = | ||||
|   | ||||
| @@ -74,10 +74,10 @@ object FtsWork { | ||||
|   def log[F[_]](f: Logger[F] => F[Unit]): FtsWork[F] = | ||||
|     FtsWork(ctx => f(ctx.logger)) | ||||
|  | ||||
| -   def clearIndex[F[_]: FlatMap](coll: Option[Ident]): FtsWork[F] = | ||||
| +   def clearIndex[F[_]: FlatMap](coll: Option[CollectiveId]): FtsWork[F] = | ||||
|     coll match { | ||||
|       case Some(cid) => | ||||
| -        log[F](_.debug(s"Clearing index data for collective '${cid.id}'")) ++ FtsWork( | ||||
| +        log[F](_.debug(s"Clearing index data for collective '${cid.value}'")) ++ FtsWork( | ||||
|           ctx => ctx.fts.clear(ctx.logger, cid) | ||||
|         ) | ||||
|       case None => | ||||
| @@ -86,7 +86,7 @@ object FtsWork { | ||||
|         ) | ||||
|     } | ||||
|  | ||||
| -   def insertAll[F[_]: FlatMap](coll: Option[Ident]): FtsWork[F] = | ||||
| +   def insertAll[F[_]: FlatMap](coll: Option[CollectiveId]): FtsWork[F] = | ||||
|     log[F](_.info("Inserting all data to index")) ++ FtsWork | ||||
|       .all( | ||||
|         FtsWork(ctx => | ||||
|   | ||||
| @@ -8,11 +8,11 @@ package docspell.joex.fts | ||||
|  | ||||
| import cats.effect._ | ||||
| import cats.implicits._ | ||||
|  | ||||
| import docspell.backend.fulltext.CreateIndex | ||||
| import docspell.common._ | ||||
| import docspell.ftsclient._ | ||||
| import docspell.joex.Config | ||||
| + import docspell.scheduler.usertask.UserTaskScope | ||||
| import docspell.scheduler.{Job, Task} | ||||
| import docspell.store.Store | ||||
|  | ||||
| @@ -43,10 +43,9 @@ object MigrationTask { | ||||
|     Job | ||||
|       .createNew( | ||||
|         taskName, | ||||
| -        DocspellSystem.taskGroup, | ||||
| +        UserTaskScope.system, | ||||
|         (), | ||||
|         "Create full-text index", | ||||
| -        DocspellSystem.taskGroup, | ||||
|         Priority.Low, | ||||
|         Some(DocspellSystem.migrationTaskTracker) | ||||
|       ) | ||||
|   | ||||
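Job.createNew previously took the group and the submitter as separate Ident arguments; DocspellSystem.taskGroup filled both slots here. After this change a single UserTaskScope carries that information. Inferred from the call sites in this commit, the signature is presumably along these lines:

    def createNew[F[_]: Sync, A](
        task: Ident,
        scope: UserTaskScope, // replaces the group + submitter pair
        args: A,
        subject: String,
        priority: Priority,
        tracker: Option[Ident]
    ): F[Job[A]]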
| @@ -40,7 +40,7 @@ object ReIndexTask { | ||||
|   def onCancel[F[_]]: Task[F, Args, Unit] = | ||||
|     Task.log[F, Args](_.warn("Cancelling full-text re-index task")) | ||||
|  | ||||
| -   private def clearData[F[_]: Async](collective: Option[Ident]): FtsWork[F] = | ||||
| +   private def clearData[F[_]: Async](collective: Option[CollectiveId]): FtsWork[F] = | ||||
|     FtsWork.log[F](_.info("Clearing index data")) ++ | ||||
|       (collective match { | ||||
|         case Some(_) => | ||||
|   | ||||
| @@ -8,10 +8,8 @@ package docspell.joex.learn | ||||
|  | ||||
| import cats.data.NonEmptyList | ||||
| import cats.implicits._ | ||||
|  | ||||
| - import docspell.common.Ident | ||||
| + import docspell.common.CollectiveId | ||||
| import docspell.store.records.{RClassifierModel, RClassifierSetting} | ||||
|  | ||||
| import doobie._ | ||||
|  | ||||
| final class ClassifierName(val name: String) extends AnyVal | ||||
| @@ -37,12 +35,12 @@ object ClassifierName { | ||||
|   val correspondentPerson: ClassifierName = | ||||
|     apply("correspondentperson") | ||||
|  | ||||
| -   def findTagClassifiers[F[_]](coll: Ident): ConnectionIO[List[ClassifierName]] = | ||||
| +   def findTagClassifiers(coll: CollectiveId): ConnectionIO[List[ClassifierName]] = | ||||
|     for { | ||||
|       categories <- RClassifierSetting.getActiveCategories(coll) | ||||
|     } yield categories.map(tagCategory) | ||||
|  | ||||
| -   def findTagModels[F[_]](coll: Ident): ConnectionIO[List[RClassifierModel]] = | ||||
| +   def findTagModels(coll: CollectiveId): ConnectionIO[List[RClassifierModel]] = | ||||
|     for { | ||||
|       categories <- RClassifierSetting.getActiveCategories(coll) | ||||
|       models <- NonEmptyList.fromList(categories) match { | ||||
| @@ -53,7 +51,9 @@ object ClassifierName { | ||||
|       } | ||||
|     } yield models | ||||
|  | ||||
| -   def findOrphanTagModels[F[_]](coll: Ident): ConnectionIO[List[RClassifierModel]] = | ||||
| +   def findOrphanTagModels( | ||||
| +       coll: CollectiveId | ||||
| +   ): ConnectionIO[List[RClassifierModel]] = | ||||
|     for { | ||||
|       cats <- RClassifierSetting.getActiveCategories(coll) | ||||
|       allModels = RClassifierModel.findAllByQuery(coll, s"$categoryPrefix%") | ||||
|   | ||||
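The [F[_]] type parameter on these helpers was dead weight, since the methods run in ConnectionIO regardless; dropping it simplifies call sites (sketch):

    // Before: ClassifierName.findTagModels[F](coll) pinned an unused effect type.
    val models: ConnectionIO[List[RClassifierModel]] =
      ClassifierName.findTagModels(CollectiveId(42))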
| @@ -26,7 +26,7 @@ object Classify { | ||||
|       workingDir: Path, | ||||
|       store: Store[F], | ||||
|       classifier: TextClassifier[F], | ||||
| -      coll: Ident, | ||||
| +      coll: CollectiveId, | ||||
|       text: String | ||||
|   )(cname: ClassifierName): F[Option[String]] = | ||||
|     (for { | ||||
|   | ||||
| @@ -48,7 +48,7 @@ object LearnClassifierTask { | ||||
|           .learnAll( | ||||
|             analyser, | ||||
|             store, | ||||
| -            ctx.args.collective, | ||||
| +            ctx.args.collectiveId, | ||||
|             cfg.classification.itemCount, | ||||
|             cfg.maxLength | ||||
|           ) | ||||
| @@ -69,7 +69,7 @@ object LearnClassifierTask { | ||||
|           _ <- OptionT.liftF( | ||||
|             LearnTags | ||||
|               .learnAllTagCategories(analyser, store)( | ||||
| -                ctx.args.collective, | ||||
| +                ctx.args.collectiveId, | ||||
|                 maxItems, | ||||
|                 cfg.maxLength | ||||
|               ) | ||||
| @@ -82,7 +82,7 @@ object LearnClassifierTask { | ||||
|         clearObsoleteTagModels(ctx, store) *> | ||||
|         // when tags are deleted, categories may get removed. fix the json array | ||||
|         store | ||||
| -          .transact(RClassifierSetting.fixCategoryList(ctx.args.collective)) | ||||
| +          .transact(RClassifierSetting.fixCategoryList(ctx.args.collectiveId)) | ||||
|           .map(_ => ()) | ||||
|     } | ||||
|  | ||||
| @@ -92,7 +92,7 @@ object LearnClassifierTask { | ||||
|   ): F[Unit] = | ||||
|     for { | ||||
|       list <- store.transact( | ||||
| -        ClassifierName.findOrphanTagModels(ctx.args.collective) | ||||
| +        ClassifierName.findOrphanTagModels(ctx.args.collectiveId) | ||||
|       ) | ||||
|       _ <- ctx.logger.info( | ||||
|         s"Found ${list.size} obsolete model files that are deleted now." | ||||
| @@ -110,7 +110,7 @@ object LearnClassifierTask { | ||||
|       cfg: Config.TextAnalysis | ||||
|   ): OptionT[F, OCollective.Classifier] = | ||||
|     if (cfg.classification.enabled) | ||||
| -      OptionT(store.transact(RClassifierSetting.findById(ctx.args.collective))) | ||||
| +      OptionT(store.transact(RClassifierSetting.findById(ctx.args.collectiveId))) | ||||
|         .filter(_.autoTagEnabled) | ||||
|         .map(OCollective.Classifier.fromRecord) | ||||
|     else | ||||
|   | ||||
| @@ -21,7 +21,7 @@ object LearnItemEntities { | ||||
|   def learnAll[F[_]: Async, A]( | ||||
|       analyser: TextAnalyser[F], | ||||
|       store: Store[F], | ||||
| -      collective: Ident, | ||||
| +      collective: CollectiveId, | ||||
|       maxItems: Int, | ||||
|       maxTextLen: Int | ||||
|   ): Task[F, A, Unit] = | ||||
| @@ -35,7 +35,7 @@ object LearnItemEntities { | ||||
|   def learnCorrOrg[F[_]: Async, A]( | ||||
|       analyser: TextAnalyser[F], | ||||
|       store: Store[F], | ||||
| -      collective: Ident, | ||||
| +      collective: CollectiveId, | ||||
|       maxItems: Int, | ||||
|       maxTextLen: Int | ||||
|   ): Task[F, A, Unit] = | ||||
| @@ -47,7 +47,7 @@ object LearnItemEntities { | ||||
|   def learnCorrPerson[F[_]: Async, A]( | ||||
|       analyser: TextAnalyser[F], | ||||
|       store: Store[F], | ||||
| -      collective: Ident, | ||||
| +      collective: CollectiveId, | ||||
|       maxItems: Int, | ||||
|       maxTextLen: Int | ||||
|   ): Task[F, A, Unit] = | ||||
| @@ -59,7 +59,7 @@ object LearnItemEntities { | ||||
|   def learnConcPerson[F[_]: Async, A]( | ||||
|       analyser: TextAnalyser[F], | ||||
|       store: Store[F], | ||||
| -      collective: Ident, | ||||
| +      collective: CollectiveId, | ||||
|       maxItems: Int, | ||||
|       maxTextLen: Int | ||||
|   ): Task[F, A, Unit] = | ||||
| @@ -71,7 +71,7 @@ object LearnItemEntities { | ||||
|   def learnConcEquip[F[_]: Async, A]( | ||||
|       analyser: TextAnalyser[F], | ||||
|       store: Store[F], | ||||
| -      collective: Ident, | ||||
| +      collective: CollectiveId, | ||||
|       maxItems: Int, | ||||
|       maxTextLen: Int | ||||
|   ): Task[F, A, Unit] = | ||||
| @@ -83,7 +83,7 @@ object LearnItemEntities { | ||||
|   private def learn[F[_]: Async, A]( | ||||
|       store: Store[F], | ||||
|       analyser: TextAnalyser[F], | ||||
| -      collective: Ident | ||||
| +      collective: CollectiveId | ||||
|   )(cname: ClassifierName, data: Context[F, _] => Stream[F, Data]): Task[F, A, Unit] = | ||||
|     Task { ctx => | ||||
|       ctx.logger.info(s"Learn classifier ${cname.name}") *> | ||||
|   | ||||
| @@ -21,7 +21,7 @@ object LearnTags { | ||||
|   def learnTagCategory[F[_]: Async, A]( | ||||
|       analyser: TextAnalyser[F], | ||||
|       store: Store[F], | ||||
| -      collective: Ident, | ||||
| +      collective: CollectiveId, | ||||
|       maxItems: Int, | ||||
|       maxTextLen: Int | ||||
|   )( | ||||
| @@ -44,7 +44,7 @@ object LearnTags { | ||||
|     } | ||||
|  | ||||
|   def learnAllTagCategories[F[_]: Async, A](analyser: TextAnalyser[F], store: Store[F])( | ||||
| -      collective: Ident, | ||||
| +      collective: CollectiveId, | ||||
|       maxItems: Int, | ||||
|       maxTextLen: Int | ||||
|   ): Task[F, A, Unit] = | ||||
|   | ||||
| @@ -20,7 +20,7 @@ object SelectItems { | ||||
|   val pageSep = LearnClassifierTask.pageSep | ||||
|   val noClass = LearnClassifierTask.noClass | ||||
|  | ||||
| -   def forCategory[F[_]](store: Store[F], collective: Ident)( | ||||
| +   def forCategory[F[_]](store: Store[F], collective: CollectiveId)( | ||||
|       maxItems: Int, | ||||
|       category: String, | ||||
|       maxTextLen: Int | ||||
| @@ -36,7 +36,7 @@ object SelectItems { | ||||
|  | ||||
|   def forCorrOrg[F[_]]( | ||||
|       store: Store[F], | ||||
| -      collective: Ident, | ||||
| +      collective: CollectiveId, | ||||
|       maxItems: Int, | ||||
|       maxTextLen: Int | ||||
|   ): Stream[F, Data] = { | ||||
| @@ -51,7 +51,7 @@ object SelectItems { | ||||
|  | ||||
|   def forCorrPerson[F[_]]( | ||||
|       store: Store[F], | ||||
| -      collective: Ident, | ||||
| +      collective: CollectiveId, | ||||
|       maxItems: Int, | ||||
|       maxTextLen: Int | ||||
|   ): Stream[F, Data] = { | ||||
| @@ -66,7 +66,7 @@ object SelectItems { | ||||
|  | ||||
|   def forConcPerson[F[_]]( | ||||
|       store: Store[F], | ||||
| -      collective: Ident, | ||||
| +      collective: CollectiveId, | ||||
|       maxItems: Int, | ||||
|       maxTextLen: Int | ||||
|   ): Stream[F, Data] = { | ||||
| @@ -81,7 +81,7 @@ object SelectItems { | ||||
|  | ||||
|   def forConcEquip[F[_]]( | ||||
|       store: Store[F], | ||||
| -      collective: Ident, | ||||
| +      collective: CollectiveId, | ||||
|       maxItems: Int, | ||||
|       maxTextLen: Int | ||||
|   ): Stream[F, Data] = { | ||||
| @@ -94,7 +94,10 @@ object SelectItems { | ||||
|     store.transact(connStream) | ||||
|   } | ||||
|  | ||||
| -   private def allItems(collective: Ident, max: Int): Stream[ConnectionIO, Ident] = { | ||||
| +   private def allItems( | ||||
| +       collective: CollectiveId, | ||||
| +       max: Int | ||||
| +   ): Stream[ConnectionIO, Ident] = { | ||||
|     val limit = if (max <= 0) Batch.all else Batch.limit(max) | ||||
|     QItem.findAllNewesFirst(collective, 10, limit) | ||||
|   } | ||||
|   | ||||
| @@ -21,7 +21,7 @@ object StoreClassifierModel { | ||||
|   def handleModel[F[_]: Async]( | ||||
|       store: Store[F], | ||||
|       logger: Logger[F], | ||||
| -      collective: Ident, | ||||
| +      collective: CollectiveId, | ||||
|       modelName: ClassifierName | ||||
|   )( | ||||
|       trainedModel: ClassifierModel | ||||
|   | ||||
| @@ -11,12 +11,12 @@ import cats.data.OptionT | ||||
| import cats.effect._ | ||||
| import cats.implicits._ | ||||
| import fs2.Stream | ||||
|  | ||||
| import docspell.backend.JobFactory | ||||
| import docspell.common._ | ||||
| import docspell.common.util.Zip | ||||
| import docspell.logging.Logger | ||||
| import docspell.scheduler._ | ||||
| + import docspell.scheduler.usertask.UserTaskScope | ||||
| import docspell.store.Store | ||||
|  | ||||
| /** Task to submit multiple files at once. By default, one file in an upload results in | ||||
| @@ -90,7 +90,7 @@ object MultiUploadArchiveTask { | ||||
|       submitter = currentJob.map(_.submitter).getOrElse(DocspellSystem.user) | ||||
|       job <- JobFactory.processItem( | ||||
|         args, | ||||
| -        AccountId(ctx.args.meta.collective, submitter), | ||||
| +        UserTaskScope(ctx.args.meta.collective, submitter.some), | ||||
|         prio, | ||||
|         None | ||||
|       ) | ||||
|   | ||||
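JobFactory.processItem no longer assembles an AccountId from collective and submitter; callers pass a UserTaskScope directly. The new parameter list, inferred from this call site (an assumption):

    def processItem[F[_]: Sync](
        args: ProcessItemArgs,
        submitter: UserTaskScope, // was: AccountId
        prio: Priority,
        tracker: Option[Ident]
    ): F[Job[ProcessItemArgs]]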
| @@ -53,7 +53,12 @@ object PeriodicDueItemsTask { | ||||
|   def withChannel[F[_]: Sync](ctx: Context[F, Args], ops: ONotification[F])( | ||||
|       cont: Vector[NotificationChannel] => F[Unit] | ||||
|   ): F[Unit] = | ||||
| -     TaskOperations.withChannel(ctx.logger, ctx.args.channels, ctx.args.account, ops)(cont) | ||||
| +     TaskOperations.withChannel( | ||||
| +       ctx.logger, | ||||
| +       ctx.args.channels, | ||||
| +       ctx.args.account.userId, | ||||
| +       ops | ||||
| +     )(cont) | ||||
|  | ||||
|   def withItems[F[_]: Sync]( | ||||
|       ctx: Context[F, Args], | ||||
|   | ||||
| @@ -54,7 +54,12 @@ object PeriodicQueryTask { | ||||
|   def withChannel[F[_]: Sync](ctx: Context[F, Args], ops: ONotification[F])( | ||||
|       cont: Vector[NotificationChannel] => F[Unit] | ||||
|   ): F[Unit] = | ||||
| -     TaskOperations.withChannel(ctx.logger, ctx.args.channels, ctx.args.account, ops)(cont) | ||||
| +     TaskOperations.withChannel( | ||||
| +       ctx.logger, | ||||
| +       ctx.args.channels, | ||||
| +       ctx.args.account.userId, | ||||
| +       ops | ||||
| +     )(cont) | ||||
|  | ||||
|   private def queryString(q: ItemQuery.Expr) = | ||||
|     ItemQueryParser.asString(q) | ||||
| @@ -64,7 +69,10 @@ object PeriodicQueryTask { | ||||
|   ): F[Unit] = { | ||||
|     def fromBookmark(id: String) = | ||||
|       store | ||||
| -         .transact(RQueryBookmark.findByNameOrId(ctx.args.account, id)) | ||||
| +         .transact( | ||||
| +           RQueryBookmark | ||||
| +             .findByNameOrId(ctx.args.account.collectiveId, ctx.args.account.userId, id) | ||||
| +         ) | ||||
|         .map(_.map(_.query)) | ||||
|         .flatTap(q => | ||||
|           ctx.logger.debug(s"Loaded bookmark '$id': ${q.map(_.expr).map(queryString)}") | ||||
| @@ -72,7 +80,9 @@ object PeriodicQueryTask { | ||||
|  | ||||
|     def fromShare(id: String) = | ||||
|       store | ||||
| -         .transact(RShare.findOneByCollective(ctx.args.account.collective, Some(true), id)) | ||||
| +         .transact( | ||||
| +           RShare.findOneByCollective(ctx.args.account.collectiveId, Some(true), id) | ||||
| +         ) | ||||
|         .map(_.map(_.query)) | ||||
|         .flatTap(q => | ||||
|           ctx.logger.debug(s"Loaded share '$id': ${q.map(_.expr).map(queryString)}") | ||||
|   | ||||
| @@ -25,24 +25,24 @@ trait TaskOperations { | ||||
|   def withChannel[F[_]: Sync]( | ||||
|       logger: Logger[F], | ||||
|       channelsIn: NonEmptyList[ChannelRef], | ||||
| -      accountId: AccountId, | ||||
| +      userId: Ident, | ||||
|       ops: ONotification[F] | ||||
|   )( | ||||
|       cont: Vector[NotificationChannel] => F[Unit] | ||||
|   ): F[Unit] = { | ||||
|     val channels = | ||||
| -      channelsIn.toList.toVector.flatTraverse(ops.findNotificationChannel(_, accountId)) | ||||
| +      channelsIn.toList.toVector.flatTraverse(ops.findNotificationChannel(_, userId)) | ||||
|  | ||||
|     channels.flatMap { ch => | ||||
|       if (ch.isEmpty) | ||||
| -        logger.error(s"No channels found for the given data: ${channelsIn}") | ||||
| +        logger.error(s"No channels found for the given data: $channelsIn") | ||||
|       else cont(ch) | ||||
|     } | ||||
|   } | ||||
|  | ||||
|   def withEventContext[F[_]]( | ||||
|       logger: Logger[F], | ||||
| -      account: AccountId, | ||||
| +      account: AccountInfo, | ||||
|       baseUrl: Option[LenientUri], | ||||
|       items: Vector[ListItem], | ||||
|       contentStart: Option[String], | ||||
|   | ||||
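Notification channels belong to a user, so resolving them needs only the user id, not the whole account. That implies ONotification.findNotificationChannel now looks roughly like this (a sketch; the exact signature is not part of this diff):

    def findNotificationChannel(
        ref: ChannelRef,
        userId: Ident // was: accountId: AccountId
    ): F[Vector[NotificationChannel]]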
| @@ -9,10 +9,10 @@ package docspell.joex.pagecount | ||||
| import cats.effect._ | ||||
| import cats.implicits._ | ||||
| import fs2.{Chunk, Stream} | ||||
|  | ||||
| import docspell.backend.JobFactory | ||||
| import docspell.common._ | ||||
| import docspell.scheduler._ | ||||
| + import docspell.scheduler.usertask.UserTaskScope | ||||
| import docspell.store.Store | ||||
| import docspell.store.records.RAttachment | ||||
|  | ||||
| @@ -51,12 +51,12 @@ object AllPageCountTask { | ||||
|       .compile | ||||
|       .foldMonoid | ||||
|  | ||||
| -   private def findAttachments[F[_]] = | ||||
| +   private def findAttachments = | ||||
|     RAttachment.findAllWithoutPageCount(50) | ||||
|  | ||||
|   private def createJobs[F[_]: Sync](ras: Chunk[RAttachment]): Stream[F, Job[String]] = { | ||||
|     def mkJob(ra: RAttachment): F[Job[MakePageCountArgs]] = | ||||
| -      JobFactory.makePageCount(MakePageCountArgs(ra.id), None) | ||||
| +      JobFactory.makePageCount(MakePageCountArgs(ra.id), UserTaskScope.system) | ||||
|  | ||||
|     val jobs = ras.traverse(mkJob) | ||||
|     Stream.evalUnChunk(jobs).map(_.encode) | ||||
| @@ -66,10 +66,9 @@ object AllPageCountTask { | ||||
|     Job | ||||
|       .createNew( | ||||
|         AllPageCountTask.taskName, | ||||
| -        DocspellSystem.taskGroup, | ||||
| +        UserTaskScope.system, | ||||
|         (), | ||||
|         "Create all page-counts", | ||||
| -        DocspellSystem.taskGroup, | ||||
|         Priority.Low, | ||||
|         Some(DocspellSystem.allPageCountTaskTracker) | ||||
|       ) | ||||
|   | ||||
| @@ -9,9 +9,9 @@ package docspell.joex.pdfconv | ||||
| import cats.effect._ | ||||
| import cats.implicits._ | ||||
| import fs2.{Chunk, Stream} | ||||
|  | ||||
| import docspell.common._ | ||||
| import docspell.scheduler._ | ||||
| + import docspell.scheduler.usertask.UserTaskScope | ||||
| import docspell.store.Store | ||||
| import docspell.store.records.RAttachment | ||||
|  | ||||
| @@ -55,7 +55,10 @@ object ConvertAllPdfTask { | ||||
|   private def createJobs[F[_]: Sync]( | ||||
|       ctx: Context[F, Args] | ||||
|   )(ras: Chunk[RAttachment]): Stream[F, Job[String]] = { | ||||
| -     val collectiveOrSystem = ctx.args.collective.getOrElse(DocspellSystem.taskGroup) | ||||
| +     val collectiveOrSystem = | ||||
| +       ctx.args.collective | ||||
| +         .map(UserTaskScope.collective) | ||||
| +         .getOrElse(UserTaskScope.system) | ||||
|  | ||||
|     def mkJob(ra: RAttachment): F[Job[PdfConvTask.Args]] = | ||||
|       Job.createNew( | ||||
| @@ -63,7 +66,6 @@ object ConvertAllPdfTask { | ||||
|         collectiveOrSystem, | ||||
|         PdfConvTask.Args(ra.id), | ||||
|         s"Convert pdf ${ra.id.id}/${ra.name.getOrElse("-")}", | ||||
| -        collectiveOrSystem, | ||||
|         Priority.Low, | ||||
|         Some(PdfConvTask.taskName / ra.id) | ||||
|       ) | ||||
|   | ||||
| @@ -9,11 +9,11 @@ package docspell.joex.preview | ||||
| import cats.effect._ | ||||
| import cats.implicits._ | ||||
| import fs2.{Chunk, Stream} | ||||
|  | ||||
| import docspell.backend.JobFactory | ||||
| import docspell.common.MakePreviewArgs.StoreMode | ||||
| import docspell.common._ | ||||
| import docspell.scheduler._ | ||||
| + import docspell.scheduler.usertask.UserTaskScope | ||||
| import docspell.store.Store | ||||
| import docspell.store.records.RAttachment | ||||
|  | ||||
| @@ -64,15 +64,12 @@ object AllPreviewsTask { | ||||
|   private def createJobs[F[_]: Sync]( | ||||
|       ctx: Context[F, Args] | ||||
|   )(ras: Chunk[RAttachment]): Stream[F, Job[MakePreviewArgs]] = { | ||||
| -     val collectiveOrSystem = { | ||||
| -       val cid = ctx.args.collective.getOrElse(DocspellSystem.taskGroup) | ||||
| -       AccountId(cid, DocspellSystem.user) | ||||
| -     } | ||||
| - | ||||
|     def mkJob(ra: RAttachment): F[Job[MakePreviewArgs]] = | ||||
|       JobFactory.makePreview( | ||||
|         MakePreviewArgs(ra.id, ctx.args.storeMode), | ||||
| -        collectiveOrSystem.some | ||||
| +        ctx.args.collective | ||||
| +          .map(UserTaskScope.collective) | ||||
| +          .getOrElse(UserTaskScope.system) | ||||
|       ) | ||||
|  | ||||
|     val jobs = ras.traverse(mkJob) | ||||
| @@ -81,8 +78,9 @@ object AllPreviewsTask { | ||||
|  | ||||
|   def job[F[_]: Sync]( | ||||
|       storeMode: MakePreviewArgs.StoreMode, | ||||
| -      cid: Option[Ident] | ||||
| +      cid: Option[CollectiveId] | ||||
|   ): F[Job[String]] = | ||||
| -     JobFactory.allPreviews(AllPreviewsArgs(cid, storeMode), None).map(_.encode) | ||||
| - | ||||
| +     JobFactory | ||||
| +       .allPreviews(AllPreviewsArgs(cid, storeMode), UserTaskScope.system) | ||||
| +       .map(_.encode) | ||||
| } | ||||
|   | ||||
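The same map/getOrElse pattern appears in ConvertAllPdfTask above: an optional collective maps onto a collective scope, falling back to the system scope. If it were needed in more places it could be factored into a helper (a sketch, not code from this commit):

    def scopeOrSystem(cid: Option[CollectiveId]): UserTaskScope =
      cid.map(UserTaskScope.collective).getOrElse(UserTaskScope.system)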
| @@ -75,7 +75,7 @@ object AttachmentPreview { | ||||
|  | ||||
|   private def createRecord[F[_]: Sync]( | ||||
|       store: Store[F], | ||||
| -      collective: Ident, | ||||
| +      collective: CollectiveId, | ||||
|       png: Stream[F, Byte], | ||||
|       ra: RAttachment | ||||
|   ): F[RAttachmentPreview] = { | ||||
|   | ||||
| @@ -81,7 +81,7 @@ object TextAnalysis { | ||||
|       labels <- analyser.annotate( | ||||
|         ctx.logger, | ||||
|         sett, | ||||
| -        ctx.args.meta.collective, | ||||
| +        ctx.args.meta.collective.valueAsIdent, | ||||
|         rm.content.getOrElse("") | ||||
|       ) | ||||
|     } yield (rm.copy(nerlabels = labels.all.toList), AttachmentDates(rm, labels.dates)) | ||||
|   | ||||
| @@ -50,7 +50,7 @@ object TextExtraction { | ||||
|           None, | ||||
|           ctx.args.meta.language | ||||
|         ) | ||||
| -        _ <- fts.indexData(ctx.logger, (idxItem +: txt.map(_.td)): _*) | ||||
| +        _ <- fts.indexData(ctx.logger, idxItem +: txt.map(_.td): _*) | ||||
|         dur <- start | ||||
|         extractedTags = txt.flatMap(_.tags).distinct.toList | ||||
|         _ <- ctx.logger.info(s"Text extraction finished in ${dur.formatExact}.") | ||||
| @@ -71,7 +71,7 @@ object TextExtraction { | ||||
|       store: Store[F], | ||||
|       cfg: ExtractConfig, | ||||
|       lang: Language, | ||||
| -      collective: Ident, | ||||
| +      collective: CollectiveId, | ||||
|       item: ItemData | ||||
|   )(ra: RAttachment): F[Result] = { | ||||
|     def makeTextData(pair: (RAttachmentMeta, List[String])): Result = | ||||
|   | ||||
| @@ -42,15 +42,15 @@ object ScanMailboxTask { | ||||
|     Task { ctx => | ||||
|       for { | ||||
|         _ <- ctx.logger.info( | ||||
| -          s"=== Start importing mails for user ${ctx.args.account.user.id}" | ||||
| +          s"=== Start importing mails for user ${ctx.args.account.login.id}" | ||||
|         ) | ||||
|         _ <- ctx.logger.debug(s"Settings: ${ctx.args.asJson.noSpaces}") | ||||
|         mailCfg <- getMailSettings(ctx, store) | ||||
|         folders = ctx.args.folders.mkString(", ") | ||||
| -        userId = ctx.args.account.user | ||||
| +        login = ctx.args.account.login | ||||
|         imapConn = ctx.args.imapConnection | ||||
|         _ <- ctx.logger.info( | ||||
| -          s"Reading mails for user ${userId.id} from ${imapConn.id}/$folders" | ||||
| +          s"Reading mails for user ${login.id} from ${imapConn.id}/$folders" | ||||
|         ) | ||||
|         _ <- importMails(cfg, mailCfg, emil, upload, joex, ctx, store) | ||||
|       } yield () | ||||
| @@ -61,7 +61,7 @@ object ScanMailboxTask { | ||||
|  | ||||
|   def getMailSettings[F[_]: Sync](ctx: Context[F, Args], store: Store[F]): F[RUserImap] = | ||||
|     store | ||||
| -       .transact(RUserImap.getByName(ctx.args.account, ctx.args.imapConnection)) | ||||
| +       .transact(RUserImap.getByName(ctx.args.account.userId, ctx.args.imapConnection)) | ||||
|       .flatMap { | ||||
|         case Some(c) => c.pure[F] | ||||
|         case None => | ||||
| @@ -234,13 +234,13 @@ object ScanMailboxTask { | ||||
|           ctx.logger.debug("Not matching on subjects. No filter given") *> headers.pure[F] | ||||
|       } | ||||
|  | ||||
| -     def filterMessageIds[C](headers: Vector[MailHeaderItem]): F[Vector[MailHeaderItem]] = | ||||
| +     def filterMessageIds(headers: Vector[MailHeaderItem]): F[Vector[MailHeaderItem]] = | ||||
|       NonEmptyList.fromFoldable(headers.flatMap(_.mh.messageId)) match { | ||||
|         case Some(nl) => | ||||
|           for { | ||||
|             archives <- store.transact( | ||||
|               RAttachmentArchive | ||||
| -                .findByMessageIdAndCollective(nl, ctx.args.account.collective) | ||||
| +                .findByMessageIdAndCollective(nl, ctx.args.account.collectiveId) | ||||
|             ) | ||||
|             existing = archives.flatMap(_.messageId).toSet | ||||
|             mails <- headers | ||||
| @@ -265,7 +265,7 @@ object ScanMailboxTask { | ||||
|             store.transact( | ||||
|               QOrganization | ||||
|                 .findPersonByContact( | ||||
| -                  ctx.args.account.collective, | ||||
| +                  ctx.args.account.collectiveId, | ||||
|                   from.address, | ||||
|                   Some(ContactKind.Email), | ||||
|                   Some(NonEmptyList.of(PersonUse.concerning)) | ||||
| @@ -320,7 +320,7 @@ object ScanMailboxTask { | ||||
|         dir <- getDirection(mail.header) | ||||
|         meta = OUpload.UploadMeta( | ||||
|           Some(dir), | ||||
| -          s"mailbox-${ctx.args.account.user.id}", | ||||
| +          s"mailbox-${ctx.args.account.login.id}", | ||||
|           args.itemFolder, | ||||
|           Seq.empty, | ||||
|           true, | ||||
| @@ -337,7 +337,12 @@ object ScanMailboxTask { | ||||
|           priority = Priority.Low, | ||||
|           tracker = None | ||||
|         ) | ||||
| -        res <- upload.submit(data, ctx.args.account, None) | ||||
| +        res <- upload.submit( | ||||
| +          data, | ||||
| +          ctx.args.account.collectiveId, | ||||
| +          ctx.args.account.userId.some, | ||||
| +          None | ||||
| +        ) | ||||
|       } yield res | ||||
|     } | ||||
|  | ||||
|   | ||||
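OUpload.submit used to take the whole account; it now receives the collective id and an optional submitting user separately. From this call site the new signature is presumably close to the following (parameter and result names are guesses):

    def submit(
        data: OUpload.UploadData[F],
        collective: CollectiveId,
        submitter: Option[Ident],
        itemId: Option[Ident] // the trailing None above, kept from the old call
    ): F[OUpload.UploadResult]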
| @@ -9,13 +9,11 @@ package docspell.joex.updatecheck | ||||
| import cats.data.OptionT | ||||
| import cats.effect._ | ||||
| import cats.implicits._ | ||||
|  | ||||
| import docspell.common._ | ||||
| import docspell.scheduler.Task | ||||
| import docspell.scheduler.usertask.UserTask | ||||
| import docspell.store.Store | ||||
| - import docspell.store.records.RUserEmail | ||||
|  | ||||
| + import docspell.store.records.{RUser, RUserEmail} | ||||
| import emil._ | ||||
|  | ||||
| object UpdateCheckTask { | ||||
| @@ -83,7 +81,9 @@ object UpdateCheckTask { | ||||
|       store: Store[F], | ||||
|       cfg: UpdateCheckConfig | ||||
|   ): F[RUserEmail] = | ||||
| -     OptionT(store.transact(RUserEmail.getByName(cfg.senderAccount, cfg.smtpId))) | ||||
| +     OptionT(store.transact(RUser.findByAccount(cfg.senderAccount))) | ||||
| +       .map(_.uid) | ||||
| +       .flatMap(uid => OptionT(store.transact(RUserEmail.getByName(uid, cfg.smtpId)))) | ||||
|       .getOrElseF( | ||||
|         Sync[F].raiseError( | ||||
|           new Exception( | ||||
|   | ||||
| @@ -18,7 +18,7 @@ import munit._ | ||||
| class TagsChangedCtxTest extends FunSuite { | ||||
|  | ||||
|   val url = LenientUri.unsafe("http://test") | ||||
| -   val account = AccountId(id("user2"), id("user2")) | ||||
| +   val account = AccountInfo(CollectiveId(1), id("user2"), id("user-abc-def"), id("user2")) | ||||
|   val tag = Tag(id("a-b-1"), "tag-red", Some("doctype")) | ||||
|   val item = Item( | ||||
|     id = id("item-1"), | ||||
|   | ||||
| @@ -0,0 +1,58 @@ | ||||
| /* | ||||
|  * Copyright 2020 Eike K. & Contributors | ||||
|  * | ||||
|  * SPDX-License-Identifier: AGPL-3.0-or-later | ||||
|  */ | ||||
|  | ||||
| package db.migration.data | ||||
|  | ||||
| import db.migration.data.DownloadZipArgs.DownloadRequest | ||||
| import docspell.common._ | ||||
| import docspell.query.ItemQuery.Expr.ValidItemStates | ||||
| import docspell.query.{ItemQuery, ItemQueryParser} | ||||
| import docspell.store.queries.Query | ||||
| import io.circe.generic.semiauto.{deriveDecoder, deriveEncoder} | ||||
| import io.circe.{Decoder, Encoder} | ||||
|  | ||||
| /** @deprecated replaced with a version using `AccountInfo` */ | ||||
| final case class DownloadZipArgs(accountId: AccountId, req: DownloadRequest) | ||||
|  | ||||
| object DownloadZipArgs { | ||||
|   val taskName: Ident = Ident.unsafe("download-query-zip") | ||||
|  | ||||
|   final case class DownloadRequest( | ||||
|       query: ItemQuery, | ||||
|       fileType: DownloadAllType, | ||||
|       maxFiles: Int, | ||||
|       maxSize: ByteSize | ||||
|   ) { | ||||
|     def toQuery(account: AccountInfo): Query = | ||||
|       Query | ||||
|         .all(account) | ||||
|         .withFix(_.andQuery(ValidItemStates)) | ||||
|         .withCond(_ => Query.QueryExpr(query.expr)) | ||||
|  | ||||
|     def itemQueryString = | ||||
|       ItemQueryParser.asString(query.expr) | ||||
|   } | ||||
|   object DownloadRequest { | ||||
|     implicit val itemQueryDecoder: Decoder[ItemQuery] = | ||||
|       Decoder.decodeString.emap(str => ItemQueryParser.parse(str).left.map(_.render)) | ||||
|  | ||||
|     implicit val itemQueryEncoder: Encoder[ItemQuery] = | ||||
|       Encoder.encodeString.contramap(q => | ||||
|         q.raw.getOrElse(ItemQueryParser.unsafeAsString(q.expr)) | ||||
|       ) | ||||
|  | ||||
|     implicit val jsonDecoder: Decoder[DownloadRequest] = | ||||
|       deriveDecoder | ||||
|  | ||||
|     implicit val jsonEncoder: Encoder[DownloadRequest] = | ||||
|       deriveEncoder | ||||
|   } | ||||
|  | ||||
|   implicit val jsonEncoder: Encoder[DownloadZipArgs] = | ||||
|     deriveEncoder | ||||
|   implicit val jsonDecoder: Decoder[DownloadZipArgs] = | ||||
|     deriveDecoder | ||||
| } | ||||
| @@ -202,7 +202,7 @@ object RNotificationChannel { | ||||
|                   s"Looking up user smtp for ${userId.id} and ${conn.id}" | ||||
|                 ) | ||||
|               ) | ||||
| -               mailConn <- OptionT(RUserEmail.getByUser(userId, conn)) | ||||
| +               mailConn <- OptionT(RUserEmail.getByName(userId, conn)) | ||||
|               rec = RNotificationChannelMail( | ||||
|                 id, | ||||
|                 userId, | ||||
|   | ||||
| @@ -175,13 +175,6 @@ object RUserEmail { | ||||
|     run(select(t.all), from(t), t.uid === userId).query[RUserEmail].to[Vector] | ||||
|   } | ||||
|  | ||||
| -   def getByUser(userId: Ident, name: Ident): ConnectionIO[Option[RUserEmail]] = { | ||||
| -     val t = Table(None) | ||||
| -     run(select(t.all), from(t), t.uid === userId && t.name === name) | ||||
| -       .query[RUserEmail] | ||||
| -       .option | ||||
| -   } | ||||
|  | ||||
|   private def findByAccount0( | ||||
|       userId: Ident, | ||||
|       nameQ: Option[String], | ||||
|   | ||||
| @@ -57,10 +57,10 @@ class TempFtsOpsTest extends DatabaseTest { | ||||
|  | ||||
|   def prepareItems(store: Store[IO]) = | ||||
|     for { | ||||
| -       _ <- store.transact(RCollective.insert(makeCollective(CollectiveId(2)))) | ||||
| -       _ <- store.transact(RUser.insert(makeUser(CollectiveId(2)))) | ||||
| +       cid <- store.transact(RCollective.insert(makeCollective)) | ||||
| +       _ <- store.transact(RUser.insert(makeUser(cid))) | ||||
|       items = (0 until 200) | ||||
| -        .map(makeItem(_, CollectiveId(2))) | ||||
| +        .map(makeItem(_, cid)) | ||||
|         .toList | ||||
|       _ <- items.traverse(i => store.transact(RItem.insert(i))) | ||||
|     } yield () | ||||
| @@ -184,9 +184,9 @@ class TempFtsOpsTest extends DatabaseTest { | ||||
|       Timestamp(Instant.now) | ||||
|     ) | ||||
|  | ||||
| -   def makeCollective(cid: CollectiveId): RCollective = | ||||
| +   def makeCollective: RCollective = | ||||
|     RCollective( | ||||
| -      cid, | ||||
| +      CollectiveId.unknown, | ||||
|       DocspellSystem.account.collective, | ||||
|       CollectiveState.Active, | ||||
|       Language.English, | ||||
|   | ||||
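The test now captures the key from the insert instead of fixing it up front, which implies RCollective.insert returns the generated id, with CollectiveId.unknown acting as a placeholder the database replaces (sketch):

    // Presumably changed from returning the affected row count (assumption):
    def insert(r: RCollective): ConnectionIO[CollectiveId]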