Merge pull request #1686 from eikek/refactor-collective

Refactor collective
mergify[bot] committed 2022-08-07 19:57:25 +00:00 (committed by GitHub)
264 changed files with 4927 additions and 2031 deletions
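
Orientation for the diffs below: collectives are now identified by a dedicated CollectiveId type instead of the generic Ident. A minimal sketch of what such a strong id type could look like, inferred only from the `collective.value` and `c.valueAsIdent` usages in this diff (not the verbatim definition from docspell.common):

package docspell.common

// Sketch only: a numeric, strongly typed collective id. Ident is docspell's
// existing string-based id type; valueAsIdent bridges to APIs that still need it.
final case class CollectiveId(value: Long) {
  def valueAsString: String = value.toString
  def valueAsIdent: Ident = Ident.unsafe(valueAsString)
}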

View File

@@ -12,7 +12,7 @@ import fs2.io.file.{Files, Path}
 import docspell.addons.out.NewFile.Meta
 import docspell.common.ProcessItemArgs.ProcessMeta
-import docspell.common.{Ident, Language}
+import docspell.common.{CollectiveId, Ident, Language}
 import docspell.logging.Logger
 import io.circe.Codec
@@ -45,7 +45,7 @@ object NewFile {
 ) {
   def toProcessMeta(
-      cid: Ident,
+      cid: CollectiveId,
       itemId: Ident,
       collLang: Option[Language],
       sourceAbbrev: String

View File

@@ -20,7 +20,7 @@ import io.circe.{Decoder, Encoder}
 case class NewItem(metadata: Option[Meta], files: List[String]) {
   def toProcessMeta(
-      cid: Ident,
+      cid: CollectiveId,
       collLang: Option[Language],
       sourceAbbrev: String
   ): ProcessItemArgs.ProcessMeta =
@@ -62,7 +62,7 @@ object NewItem {
 ) {
   def toProcessArgs(
-      cid: Ident,
+      cid: CollectiveId,
       collLang: Option[Language],
       sourceAbbrev: String
   ): ProcessItemArgs.ProcessMeta =

View File

@@ -13,7 +13,7 @@ trait AttachedEvent[R] {
   def value: R
-  def event(account: AccountId, baseUrl: Option[LenientUri]): Iterable[Event]
+  def event(account: AccountInfo, baseUrl: Option[LenientUri]): Iterable[Event]
   def map[U](f: R => U): AttachedEvent[U]
 }
@@ -24,7 +24,7 @@ object AttachedEvent {
   def only[R](v: R): AttachedEvent[R] =
     new AttachedEvent[R] {
       val value = v
-      def event(account: AccountId, baseUrl: Option[LenientUri]): Iterable[Event] =
+      def event(account: AccountInfo, baseUrl: Option[LenientUri]): Iterable[Event] =
         Iterable.empty[Event]
       def map[U](f: R => U): AttachedEvent[U] =
@@ -33,10 +33,10 @@ object AttachedEvent {
   def apply[R](
       v: R
-  )(mkEvent: (AccountId, Option[LenientUri]) => Event): AttachedEvent[R] =
+  )(mkEvent: (AccountInfo, Option[LenientUri]) => Event): AttachedEvent[R] =
     new AttachedEvent[R] {
       val value = v
-      def event(account: AccountId, baseUrl: Option[LenientUri]): Iterable[Event] =
+      def event(account: AccountInfo, baseUrl: Option[LenientUri]): Iterable[Event] =
         Some(mkEvent(account, baseUrl))
       def map[U](f: R => U): AttachedEvent[U] =

View File

@@ -13,8 +13,8 @@ import cats.syntax.all._
 import docspell.backend.BackendCommands.EventContext
 import docspell.backend.ops.OCustomFields.SetValue
 import docspell.backend.ops._
+import docspell.common._
 import docspell.common.bc._
-import docspell.common.{AccountId, Ident, LenientUri}
 private[backend] class BackendCommands[F[_]: Sync](
     itemOps: OItem[F],
@@ -25,14 +25,14 @@ private[backend] class BackendCommands[F[_]: Sync](
 ) extends BackendCommandRunner[F, Unit] {
   private[this] val logger = docspell.logging.getLogger[F]
-  def run(collective: Ident, cmd: BackendCommand): F[Unit] =
+  def run(collective: CollectiveId, cmd: BackendCommand): F[Unit] =
     doRun(collective, cmd).attempt.flatMap {
       case Right(_) => ().pure[F]
       case Left(ex) =>
-        logger.error(ex)(s"Backend command $cmd failed for collective ${collective.id}.")
+        logger.error(ex)(s"Backend command $cmd failed for collective $collective.")
     }
-  def doRun(collective: Ident, cmd: BackendCommand): F[Unit] =
+  def doRun(collective: CollectiveId, cmd: BackendCommand): F[Unit] =
     cmd match {
       case BackendCommand.ItemUpdate(item, actions) =>
         actions.traverse_(a => runItemAction(collective, item, a))
@@ -41,38 +41,38 @@ private[backend] class BackendCommands[F[_]: Sync](
         actions.traverse_(a => runAttachAction(collective, item, attach, a))
     }
-  def runAll(collective: Ident, cmds: List[BackendCommand]): F[Unit] =
+  def runAll(collective: CollectiveId, cmds: List[BackendCommand]): F[Unit] =
     cmds.traverse_(run(collective, _))
-  def runItemAction(collective: Ident, item: Ident, action: ItemAction): F[Unit] =
+  def runItemAction(collective: CollectiveId, item: Ident, action: ItemAction): F[Unit] =
     action match {
       case ItemAction.AddTags(tags) =>
-        logger.debug(s"Setting tags $tags on ${item.id} for ${collective.id}") *>
+        logger.debug(s"Setting tags $tags on ${item.id} for ${collective.value}") *>
           itemOps
             .linkTags(item, tags.toList, collective)
             .flatMap(sendEvents)
       case ItemAction.RemoveTags(tags) =>
-        logger.debug(s"Remove tags $tags on ${item.id} for ${collective.id}") *>
+        logger.debug(s"Remove tags $tags on ${item.id} for ${collective.value}") *>
           itemOps
             .removeTagsMultipleItems(Nel.of(item), tags.toList, collective)
             .flatMap(sendEvents)
       case ItemAction.ReplaceTags(tags) =>
-        logger.debug(s"Replace tags $tags on ${item.id} for ${collective.id}") *>
+        logger.debug(s"Replace tags $tags on ${item.id} for $collective") *>
           itemOps
             .setTags(item, tags.toList, collective)
             .flatMap(sendEvents)
       case ItemAction.SetFolder(folder) =>
-        logger.debug(s"Set folder $folder on ${item.id} for ${collective.id}") *>
+        logger.debug(s"Set folder $folder on ${item.id} for $collective") *>
           itemOps
             .setFolder(item, folder, collective)
             .void
       case ItemAction.RemoveTagsCategory(cats) =>
         logger.debug(
-          s"Remove tags in categories $cats on ${item.id} for ${collective.id}"
+          s"Remove tags in categories $cats on ${item.id} for $collective"
         ) *>
           itemOps
             .removeTagsOfCategories(item, collective, cats)
@@ -80,51 +80,51 @@ private[backend] class BackendCommands[F[_]: Sync](
       case ItemAction.SetCorrOrg(id) =>
         logger.debug(
-          s"Set correspondent organization ${id.map(_.id)} for ${collective.id}"
+          s"Set correspondent organization ${id.map(_.id)} for $collective"
         ) *>
           itemOps.setCorrOrg(Nel.of(item), id, collective).void
       case ItemAction.SetCorrPerson(id) =>
         logger.debug(
-          s"Set correspondent person ${id.map(_.id)} for ${collective.id}"
+          s"Set correspondent person ${id.map(_.id)} for $collective"
         ) *>
           itemOps.setCorrPerson(Nel.of(item), id, collective).void
       case ItemAction.SetConcPerson(id) =>
         logger.debug(
-          s"Set concerning person ${id.map(_.id)} for ${collective.id}"
+          s"Set concerning person ${id.map(_.id)} for $collective"
         ) *>
           itemOps.setConcPerson(Nel.of(item), id, collective).void
       case ItemAction.SetConcEquipment(id) =>
         logger.debug(
-          s"Set concerning equipment ${id.map(_.id)} for ${collective.id}"
+          s"Set concerning equipment ${id.map(_.id)} for $collective"
        ) *>
           itemOps.setConcEquip(Nel.of(item), id, collective).void
       case ItemAction.SetField(field, value) =>
         logger.debug(
-          s"Set field on item ${item.id} ${field.id} to '$value' for ${collective.id}"
+          s"Set field on item ${item.id} ${field.id} to '$value' for $collective"
         ) *>
           fieldOps
             .setValue(item, SetValue(field, value, collective))
             .flatMap(sendEvents)
       case ItemAction.SetNotes(notes) =>
-        logger.debug(s"Set notes on item ${item.id} for ${collective.id}") *>
+        logger.debug(s"Set notes on item ${item.id} for $collective") *>
           itemOps.setNotes(item, notes, collective).void
       case ItemAction.AddNotes(notes, sep) =>
-        logger.debug(s"Add notes on item ${item.id} for ${collective.id}") *>
+        logger.debug(s"Add notes on item ${item.id} for $collective") *>
           itemOps.addNotes(item, notes, sep, collective).void
       case ItemAction.SetName(name) =>
-        logger.debug(s"Set name '$name' on item ${item.id} for ${collective.id}") *>
+        logger.debug(s"Set name '$name' on item ${item.id} for $collective") *>
           itemOps.setName(item, name, collective).void
     }
   def runAttachAction(
-      collective: Ident,
+      collective: CollectiveId,
       itemId: Ident,
       attachId: Ident,
       action: AttachmentAction
@@ -150,7 +150,7 @@ private[backend] class BackendCommands[F[_]: Sync](
 object BackendCommands {
   /** If supplied, notification events will be send. */
-  case class EventContext(account: AccountId, baseUrl: Option[LenientUri])
+  case class EventContext(account: AccountInfo, baseUrl: Option[LenientUri])
   def fromBackend[F[_]: Sync](
       backendApp: BackendApp[F],

View File

@@ -14,18 +14,18 @@ import docspell.backend.task.DownloadZipArgs
 import docspell.common._
 import docspell.notification.api.PeriodicQueryArgs
 import docspell.scheduler.Job
+import docspell.scheduler.usertask.UserTaskScope
 object JobFactory extends MailAddressCodec {
   def existingItemAddon[F[_]: Sync](
       args: ItemAddonTaskArgs,
-      submitter: AccountId
+      submitter: UserTaskScope
   ): F[Job[ItemAddonTaskArgs]] =
     Job.createNew(
       ItemAddonTaskArgs.taskName,
-      submitter.collective,
+      submitter,
       args,
       "Run addons on item",
-      submitter.user,
       Priority.High,
       args.addonRunConfigs
         .map(_.take(23))
@@ -39,179 +39,167 @@ object JobFactory extends MailAddressCodec {
   def downloadZip[F[_]: Sync](
       args: DownloadZipArgs,
       summaryId: Ident,
-      submitter: AccountId
+      submitter: UserTaskScope
   ): F[Job[DownloadZipArgs]] =
     Job.createNew(
       DownloadZipArgs.taskName,
-      submitter.collective,
+      submitter,
       args,
       s"Prepare zip file for query",
-      submitter.user,
       Priority.High,
       Some(summaryId)
     )
   def integrityCheck[F[_]: Sync](
       args: FileIntegrityCheckArgs,
-      submitter: AccountId = DocspellSystem.account
+      submitter: UserTaskScope = UserTaskScope.system
   ): F[Job[FileIntegrityCheckArgs]] =
     Job.createNew(
       FileIntegrityCheckArgs.taskName,
-      submitter.collective,
+      submitter,
       args,
       s"Check integrity of files",
-      submitter.user,
       Priority.Low,
       Some(FileIntegrityCheckArgs.taskName)
     )
   def fileCopy[F[_]: Sync](
       args: FileCopyTaskArgs,
-      submitter: AccountId = DocspellSystem.account
+      submitter: UserTaskScope = UserTaskScope.system
   ): F[Job[FileCopyTaskArgs]] =
     Job.createNew(
       FileCopyTaskArgs.taskName,
-      submitter.collective,
+      submitter,
       args,
       "Copying all files",
-      submitter.user,
       Priority.High,
       Some(FileCopyTaskArgs.taskName)
     )
   def periodicQuery[F[_]: Sync](
       args: PeriodicQueryArgs,
-      submitter: AccountId
+      submitter: UserTaskScope
   ): F[Job[PeriodicQueryArgs]] =
     Job.createNew(
       PeriodicQueryArgs.taskName,
-      submitter.collective,
+      submitter,
       args,
       s"Running periodic query, notify via ${args.channels.map(_.channelType)}",
-      submitter.user,
       Priority.Low,
       None
     )
   def makePageCount[F[_]: Sync](
       args: MakePageCountArgs,
-      account: Option[AccountId]
+      submitter: UserTaskScope
   ): F[Job[MakePageCountArgs]] =
     Job.createNew(
       MakePageCountArgs.taskName,
-      account.map(_.collective).getOrElse(DocspellSystem.taskGroup),
+      submitter,
       args,
       s"Find page-count metadata for ${args.attachment.id}",
-      account.map(_.user).getOrElse(DocspellSystem.user),
       Priority.Low,
       Some(MakePageCountArgs.taskName / args.attachment)
     )
   def makePreview[F[_]: Sync](
       args: MakePreviewArgs,
-      account: Option[AccountId]
+      submitter: UserTaskScope
   ): F[Job[MakePreviewArgs]] =
     Job.createNew(
       MakePreviewArgs.taskName,
-      account.map(_.collective).getOrElse(DocspellSystem.taskGroup),
+      submitter,
       args,
       s"Generate preview image",
-      account.map(_.user).getOrElse(DocspellSystem.user),
       Priority.Low,
       Some(MakePreviewArgs.taskName / args.attachment)
     )
   def allPreviews[F[_]: Sync](
       args: AllPreviewsArgs,
-      submitter: Option[Ident]
+      submitter: UserTaskScope
   ): F[Job[AllPreviewsArgs]] =
     Job.createNew(
       AllPreviewsArgs.taskName,
-      args.collective.getOrElse(DocspellSystem.taskGroup),
+      submitter,
       args,
       "Create preview images",
-      submitter.getOrElse(DocspellSystem.user),
       Priority.Low,
       Some(DocspellSystem.allPreviewTaskTracker)
     )
   def convertAllPdfs[F[_]: Sync](
-      collective: Option[Ident],
-      submitter: Option[Ident],
+      args: ConvertAllPdfArgs,
+      submitter: UserTaskScope,
       prio: Priority
   ): F[Job[ConvertAllPdfArgs]] =
     Job.createNew(
       ConvertAllPdfArgs.taskName,
-      collective.getOrElse(DocspellSystem.taskGroup),
-      ConvertAllPdfArgs(collective),
+      submitter,
+      args,
       s"Convert all pdfs not yet converted",
-      submitter.getOrElse(DocspellSystem.user),
       prio,
-      collective
-        .map(c => c / ConvertAllPdfArgs.taskName)
+      args.collective
+        .map(c => c.valueAsIdent / ConvertAllPdfArgs.taskName)
         .orElse(ConvertAllPdfArgs.taskName.some)
     )
   def reprocessItem[F[_]: Sync](
       args: ReProcessItemArgs,
-      account: AccountId,
+      submitter: UserTaskScope,
       prio: Priority
   ): F[Job[ReProcessItemArgs]] =
     Job.createNew(
       ReProcessItemArgs.taskName,
-      account.collective,
+      submitter,
       args,
       s"Re-process files of item ${args.itemId.id}",
-      account.user,
       prio,
       Some(ReProcessItemArgs.taskName / args.itemId)
     )
   def multiUpload[F[_]: Sync](
       args: ProcessItemArgs,
-      account: AccountId,
+      submitter: UserTaskScope,
       prio: Priority,
       tracker: Option[Ident]
   ): F[Job[ProcessItemArgs]] =
     Job.createNew(
       ProcessItemArgs.multiUploadTaskName,
-      account.collective,
+      submitter,
       args,
       args.makeSubject,
-      account.user,
       prio,
       tracker
     )
   def processItem[F[_]: Sync](
       args: ProcessItemArgs,
-      account: AccountId,
+      submitter: UserTaskScope,
       prio: Priority,
       tracker: Option[Ident]
   ): F[Job[ProcessItemArgs]] =
     Job.createNew(
       ProcessItemArgs.taskName,
-      account.collective,
+      submitter,
       args,
       args.makeSubject,
-      account.user,
       prio,
       tracker
     )
   def processItems[F[_]: Sync](
       args: List[ProcessItemArgs],
-      account: AccountId,
+      submitter: UserTaskScope,
       prio: Priority,
       tracker: Option[Ident]
   ): F[List[Job[ProcessItemArgs]]] = {
     def create(arg: ProcessItemArgs): F[Job[ProcessItemArgs]] =
       Job.createNew(
         ProcessItemArgs.taskName,
-        account.collective,
+        submitter,
         arg,
         arg.makeSubject,
-        account.user,
         prio,
         tracker
       )
@@ -222,22 +210,23 @@ object JobFactory extends MailAddressCodec {
   def reIndexAll[F[_]: Sync]: F[Job[ReIndexTaskArgs]] =
     Job.createNew(
       ReIndexTaskArgs.taskName,
-      DocspellSystem.taskGroup,
+      UserTaskScope.system,
       ReIndexTaskArgs(None),
       "Recreate full-text index",
-      DocspellSystem.taskGroup,
       Priority.Low,
       Some(DocspellSystem.migrationTaskTracker)
     )
-  def reIndex[F[_]: Sync](account: AccountId): F[Job[ReIndexTaskArgs]] = {
-    val args = ReIndexTaskArgs(Some(account.collective))
+  def reIndex[F[_]: Sync](
+      cid: CollectiveId,
+      submitterUserId: Option[Ident]
+  ): F[Job[ReIndexTaskArgs]] = {
+    val args = ReIndexTaskArgs(Some(cid))
     Job.createNew(
       ReIndexTaskArgs.taskName,
-      account.collective,
+      UserTaskScope(cid, submitterUserId),
       args,
       "Recreate full-text index",
-      account.user,
       Priority.Low,
       Some(ReIndexTaskArgs.tracker(args))
     )
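
Aside: every factory method above now takes a single UserTaskScope where it previously took an AccountId (or an Option of one). The constructors visible in this diff are UserTaskScope.system, UserTaskScope.collective(...) and UserTaskScope(cid, userId). A hedged caller-side sketch; value names like cid, args and jobStore stand in for whatever exists at the real call sites:

// Sketch: submitting a processing job with the UserTaskScope-based JobFactory API.
val submitter = UserTaskScope.collective(cid) // or UserTaskScope.system, or UserTaskScope(cid, Some(userId))
for {
  job <- JobFactory.processItem(args, submitter, Priority.High, None)
  _ <- jobStore.insert(job.encode)
} yield ()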

View File

@@ -18,12 +18,13 @@ import scodec.bits.ByteVector
 case class AuthToken(
     nowMillis: Long,
-    account: AccountId,
+    account: AccountInfo,
     requireSecondFactor: Boolean,
     valid: Option[Duration],
     salt: String,
     sig: String
 ) {
   def asString =
     valid match {
       case Some(v) =>
@@ -63,7 +64,7 @@ object AuthToken {
     for {
       millis <- TokenUtil.asInt(ms).toRight("Cannot read authenticator data")
       acc <- TokenUtil.b64dec(as).toRight("Cannot read authenticator data")
-      accId <- AccountId.parse(acc)
+      accId <- AccountInfo.parse(acc)
       twofac <- Right[String, Boolean](java.lang.Boolean.parseBoolean(fa))
       valid <- TokenUtil
         .asInt(vs)
@@ -75,7 +76,7 @@ object AuthToken {
     for {
       millis <- TokenUtil.asInt(ms).toRight("Cannot read authenticator data")
       acc <- TokenUtil.b64dec(as).toRight("Cannot read authenticator data")
-      accId <- AccountId.parse(acc)
+      accId <- AccountInfo.parse(acc)
       twofac <- Right[String, Boolean](java.lang.Boolean.parseBoolean(fa))
     } yield AuthToken(millis, accId, twofac, None, salt, sig)
@@ -84,7 +85,7 @@ object AuthToken {
   }
   def user[F[_]: Sync](
-      accountId: AccountId,
+      accountId: AccountInfo,
       requireSecondFactor: Boolean,
       key: ByteVector,
       valid: Option[Duration]
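
The auth token now carries an AccountInfo instead of the bare AccountId. Only account.userId and AccountInfo.parse are visible in this diff; a rough sketch of the implied shape, where every field other than userId is an assumption:

// Sketch only: the richer account value used by AuthToken and Login below.
final case class AccountInfo(
    collectiveId: CollectiveId, // assumed: resolved numeric collective id
    collective: Ident,          // assumed: collective name
    userId: Ident,              // confirmed by uses like RTotp.findEnabledByUserId(token.account.userId, ...)
    login: Ident                // assumed: user login name
)

object AccountInfo {
  // Referenced by the token parsing code in this diff; implementation not shown here.
  def parse(str: String): Either[String, AccountInfo] = ???
}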

View File

@@ -96,10 +96,12 @@ object Login {
       for {
         data <- store.transact(QLogin.findUser(accountId))
         _ <- logF.trace(s"Account lookup: $data")
-        res <-
-          if (data.exists(checkNoPassword(_, Set(AccountSource.OpenId))))
-            doLogin(config, accountId, false)
-          else Result.invalidAuth.pure[F]
+        res <- data match {
+          case Some(d) if checkNoPassword(d, Set(AccountSource.OpenId)) =>
+            doLogin(config, d.account, false)
+          case _ =>
+            Result.invalidAuth.pure[F]
+        }
       } yield res
     def loginSession(config: Config)(sessionKey: String): F[Result] =
@@ -122,9 +124,12 @@ object Login {
           for {
             data <- store.transact(QLogin.findUser(acc))
             _ <- logF.trace(s"Account lookup: $data")
-            res <-
-              if (data.exists(check(up.pass))) doLogin(config, acc, up.rememberMe)
-              else Result.invalidAuth.pure[F]
+            res <- data match {
+              case Some(d) if check(up.pass)(d) =>
+                doLogin(config, d.account, up.rememberMe)
+              case _ =>
+                Result.invalidAuth.pure[F]
+            }
           } yield res
         case Left(_) =>
           logF.info(s"User authentication failed for: ${up.hidePass}") *>
@@ -162,7 +167,7 @@ object Login {
       (for {
         _ <- validateToken
         key <- EitherT.fromOptionF(
-          store.transact(RTotp.findEnabledByLogin(sf.token.account, true)),
+          store.transact(RTotp.findEnabledByUserId(sf.token.account.userId, true)),
           Result.invalidAuth
         )
         now <- EitherT.right[Result](Timestamp.current[F])
@@ -175,13 +180,13 @@ object Login {
     }
     def loginRememberMe(config: Config)(token: String): F[Result] = {
-      def okResult(acc: AccountId) =
+      def okResult(acc: AccountInfo) =
         for {
           _ <- store.transact(RUser.updateLogin(acc))
           token <- AuthToken.user(acc, false, config.serverSecret, None)
         } yield Result.ok(token, None)
-      def doLogin(rid: Ident) =
+      def rememberedLogin(rid: Ident) =
         (for {
           now <- OptionT.liftF(Timestamp.current[F])
           minTime = now - config.rememberMe.valid
@@ -214,7 +219,7 @@ object Login {
           else if (rt.isExpired(config.rememberMe.valid))
             logF.info(s"RememberMe cookie expired ($rt).") *> Result.invalidTime
               .pure[F]
-          else doLogin(rt.rememberId)
+          else rememberedLogin(rt.rememberId)
         case Left(err) =>
           logF.info(s"RememberMe cookie was invalid: $err") *> Result.invalidAuth
             .pure[F]
@@ -245,11 +250,11 @@ object Login {
     private def doLogin(
         config: Config,
-        acc: AccountId,
+        acc: AccountInfo,
         rememberMe: Boolean
     ): F[Result] =
       for {
-        require2FA <- store.transact(RTotp.isEnabled(acc))
+        require2FA <- store.transact(RTotp.isEnabled(acc.userId))
         _ <-
           if (require2FA) ().pure[F]
           else store.transact(RUser.updateLogin(acc))
@@ -263,13 +268,11 @@ object Login {
     private def insertRememberToken(
         store: Store[F],
-        acc: AccountId,
+        acc: AccountInfo,
         config: Config
     ): F[RememberToken] =
       for {
-        uid <- OptionT(store.transact(RUser.findIdByAccount(acc)))
-          .getOrRaise(new IllegalStateException(s"No user_id found for account: $acc"))
-        rme <- RRememberMe.generate[F](uid)
+        rme <- RRememberMe.generate[F](acc.userId)
         _ <- store.transact(RRememberMe.insert(rme))
         token <- RememberToken.user(rme.id, config.serverSecret)
       } yield token

View File

@@ -24,7 +24,7 @@ trait CreateIndex[F[_]] {
     */
   def reIndexData(
       logger: Logger[F],
-      collective: Option[Ident],
+      collective: Option[CollectiveId],
       itemIds: Option[NonEmptyList[Ident]],
       chunkSize: Int
   ): F[Unit]
@@ -40,7 +40,7 @@ object CreateIndex {
     new CreateIndex[F] {
       def reIndexData(
           logger: Logger[F],
-          collective: Option[Ident],
+          collective: Option[CollectiveId],
           itemIds: Option[NonEmptyList[Ident]],
           chunkSize: Int
       ): F[Unit] = {

View File

@@ -21,7 +21,7 @@ import docspell.store.queries.QCustomField.FieldValue
 import docspell.store.records._
 trait Merge[F[_]] {
-  def merge(items: NonEmptyList[Ident], collective: Ident): F[Merge.Result[RItem]]
+  def merge(items: NonEmptyList[Ident], collective: CollectiveId): F[Merge.Result[RItem]]
 }
 object Merge {
@@ -41,7 +41,10 @@ object Merge {
       createIndex: CreateIndex[F]
   ): Merge[F] =
     new Merge[F] {
-      def merge(givenIds: NonEmptyList[Ident], collective: Ident): F[Result[RItem]] =
+      def merge(
+          givenIds: NonEmptyList[Ident],
+          collective: CollectiveId
+      ): F[Result[RItem]] =
         (for {
           items <- loadItems(givenIds, collective)
           ids = items.map(_.id)
@@ -65,7 +68,7 @@ object Merge {
       def loadItems(
           items: NonEmptyList[Ident],
-          collective: Ident
+          collective: CollectiveId
       ): EitherT[F, Error, NonEmptyList[RItem]] = {
         val loaded =
           store

View File

@@ -26,7 +26,7 @@ import docspell.store.records.AddonRunConfigResolved
 trait AddonOps[F[_]] {
   def execAll(
-      collective: Ident,
+      collective: CollectiveId,
       trigger: Set[AddonTriggerType],
       runConfigIds: Set[Ident],
       logger: Option[Logger[F]]
@@ -34,7 +34,7 @@ trait AddonOps[F[_]] {
       middleware: Middleware[F]
   ): F[ExecResult]
-  def execById(collective: Ident, runConfigId: Ident, logger: Logger[F])(
+  def execById(collective: CollectiveId, runConfigId: Ident, logger: Logger[F])(
       middleware: Middleware[F]
   ): F[ExecResult]
@@ -42,13 +42,16 @@ trait AddonOps[F[_]] {
     * filtered by given ids and triggers.
     */
   def findAddonRefs(
-      collective: Ident,
+      collective: CollectiveId,
       trigger: Set[AddonTriggerType],
       runConfigIds: Set[Ident]
   ): F[List[AddonRunConfigRef]]
   /** Find enabled addon run config reference given an addon task id */
-  def findAddonRef(collective: Ident, runConfigId: Ident): F[Option[AddonRunConfigRef]]
+  def findAddonRef(
+      collective: CollectiveId,
+      runConfigId: Ident
+  ): F[Option[AddonRunConfigRef]]
   /** Creates an executor for addons given a configuration. */
   def getExecutor(cfg: AddonExecutorConfig): F[AddonExecutor[F]]
@@ -58,7 +61,7 @@ trait AddonOps[F[_]] {
 object AddonOps {
   case class AddonRunConfigRef(
       id: Ident,
-      collective: Ident,
+      collective: CollectiveId,
       userId: Option[Ident],
       name: String,
       refs: List[AddonRef]
@@ -110,7 +113,7 @@ object AddonOps {
     private val prepare = new AddonPrepare[F](store)
     def execAll(
-        collective: Ident,
+        collective: CollectiveId,
         trigger: Set[AddonTriggerType],
         runConfigIds: Set[Ident],
         logger: Option[Logger[F]]
@@ -125,7 +128,7 @@ object AddonOps {
         results <- runCfgs.traverse(r => execRunConfig(log, r, custom))
       } yield ExecResult(results.flatMap(_.result), runCfgs)
-    def execById(collective: Ident, runConfigId: Ident, logger: Logger[F])(
+    def execById(collective: CollectiveId, runConfigId: Ident, logger: Logger[F])(
         custom: Middleware[F]
     ): F[ExecResult] =
       (for {
@@ -167,7 +170,7 @@ object AddonOps {
       Async[F].pure(AddonExecutor(cfg, urlReader))
     def findAddonRefs(
-        collective: Ident,
+        collective: CollectiveId,
         trigger: Set[AddonTriggerType],
         runConfigIds: Set[Ident]
     ): F[List[AddonRunConfigRef]] =
@@ -183,7 +186,7 @@ object AddonOps {
       .map(_.map(AddonRunConfigRef.fromResolved))
     def findAddonRef(
-        collective: Ident,
+        collective: CollectiveId,
         runConfigId: Ident
     ): F[Option[AddonRunConfigRef]] =
       OptionT(

View File

@@ -20,6 +20,7 @@ import docspell.common.bc.BackendCommandRunner
 import docspell.common.syntax.file._
 import docspell.logging.Logger
 import docspell.scheduler.JobStore
+import docspell.scheduler.usertask.UserTaskScope
 import docspell.store.Store
 import docspell.store.records._
@@ -32,7 +33,7 @@ final private[joex] class AddonPostProcess[F[_]: Sync: Files](
   def onResult(
       logger: Logger[F],
-      collective: Ident,
+      collective: CollectiveId,
       result: AddonExecutionResult,
       outputDir: Path
   ): F[Unit] =
@@ -45,7 +46,7 @@ final private[joex] class AddonPostProcess[F[_]: Sync: Files](
   def onSuccess(
       logger: Logger[F],
-      collective: Ident,
+      collective: CollectiveId,
       output: AddonOutput,
       outputDir: Path
   ): F[Unit] =
@@ -60,7 +61,7 @@ final private[joex] class AddonPostProcess[F[_]: Sync: Files](
   def submitNewItem(
       logger: Logger[F],
-      collective: Ident,
+      collective: CollectiveId,
       outputDir: Path
   )(newItem: NewItem): F[Unit] =
     for {
@@ -85,13 +86,17 @@ final private[joex] class AddonPostProcess[F[_]: Sync: Files](
         newItem.toProcessMeta(collective, collLang, "addon"),
         uploaded.map(f => ProcessItemArgs.File(f._1.some, f._2))
       )
-      account = AccountId(collective, DocspellSystem.user)
-      job <- JobFactory.processItem(args, account, Priority.High, None)
+      job <- JobFactory.processItem(
+        args,
+        UserTaskScope.collective(collective),
+        Priority.High,
+        None
+      )
       _ <- jobStore.insert(job.encode)
       _ <- logger.debug(s"Submitted job for processing: ${job.id}")
     } yield ()
-  def updateOne(logger: Logger[F], collective: Ident, outputDir: Path)(
+  def updateOne(logger: Logger[F], collective: CollectiveId, outputDir: Path)(
       itemFile: ItemFile
   ): F[Unit] =
     for {
@@ -123,7 +128,7 @@ final private[joex] class AddonPostProcess[F[_]: Sync: Files](
   def submitNewFiles(
       logger: Logger[F],
-      collective: Ident,
+      collective: CollectiveId,
       outputDir: Path
   )(itemFile: ItemFile): F[Unit] =
     for {
@@ -131,7 +136,7 @@ final private[joex] class AddonPostProcess[F[_]: Sync: Files](
       collLang <- store.transact(RCollective.findLanguage(collective))
       newFiles <- itemFile.resolveNewFiles(logger, outputDir)
       byMeta = newFiles.groupBy(_._1.metadata).view.mapValues(_.map(_._2))
-      account = AccountId(collective, DocspellSystem.user)
+      submitter = UserTaskScope.collective(collective)
       _ <- byMeta.toList.traverse_ { case (meta, files) =>
         for {
           uploaded <- files.traverse(file =>
@@ -151,7 +156,7 @@ final private[joex] class AddonPostProcess[F[_]: Sync: Files](
             meta.toProcessMeta(collective, itemFile.itemId, collLang, "addon"),
             uploaded.map(f => ProcessItemArgs.File(f._1.some, f._2))
           )
-          job <- JobFactory.processItem(args, account, Priority.High, None)
+          job <- JobFactory.processItem(args, submitter, Priority.High, None)
           _ <- jobStore.insert(job.encode)
           _ <- logger.debug(s"Submitted job for processing: ${job.id}")
         } yield ()
@@ -168,19 +173,29 @@ final private[joex] class AddonPostProcess[F[_]: Sync: Files](
       .semiflatMap(run)
       .getOrElseF(logger.warn(s"Cannot find attachment for $key to update text!"))
-  private def setText(collective: Ident, ra: RAttachment, readText: F[String]): F[Unit] =
+  private def setText(
+      collective: CollectiveId,
+      ra: RAttachment,
+      readText: F[String]
+  ): F[Unit] =
     attachOps.setExtractedText(collective, ra.itemId, ra.id, readText)
   private def replacePdf(
-      collective: Ident,
+      collective: CollectiveId,
       ra: RAttachment,
       file: Path,
       generatePreview: Boolean
   ): F[Unit] =
-    attachOps.addOrReplacePdf(collective, ra.id, file.readAll, generatePreview)
+    attachOps.addOrReplacePdf(
+      collective,
+      ra.id,
+      file.readAll,
+      generatePreview,
+      UserTaskScope.collective(collective)
+    )
   private def replacePreview(
-      collective: Ident,
+      collective: CollectiveId,
       attachId: Ident,
       imageData: Path
   ): F[Unit] =

View File

@@ -16,7 +16,8 @@ import docspell.backend.joex.AddonOps.AddonRunConfigRef
 import docspell.common._
 import docspell.logging.Logger
 import docspell.store.Store
-import docspell.store.records.{RNode, RUser}
+import docspell.store.queries.QLogin
+import docspell.store.records.RNode
 import scodec.bits.ByteVector
@@ -46,8 +47,7 @@ private[joex] class AddonPrepare[F[_]: Sync](store: Store[F]) extends LoggerExte
   ): F[Middleware[F]] =
     (for {
       userId <- OptionT.fromOption[F](runConfigRef.userId)
-      user <- OptionT(store.transact(RUser.getIdByIdOrLogin(userId)))
-      account = AccountId(runConfigRef.collective, user.login)
+      account <- OptionT(store.transact(QLogin.findUser(userId))).map(_.account)
       env =
         Middleware.prepare[F](
           Kleisli(input => makeDscEnv(account, tokenValidity).map(input.addEnv))
@@ -58,7 +58,7 @@ private[joex] class AddonPrepare[F[_]: Sync](store: Store[F]) extends LoggerExte
     * Additionally a random rest-server is looked up from the database to set its url.
     */
   def makeDscEnv(
-      accountId: AccountId,
+      account: AccountInfo,
       tokenValidity: Duration
   ): F[Map[String, String]] =
     for {
@@ -71,7 +71,7 @@ private[joex] class AddonPrepare[F[_]: Sync](store: Store[F]) extends LoggerExte
       secret = serverNode.flatMap(_.serverSecret)
       token <- AuthToken.user(
-        accountId,
+        account,
         false,
         secret.getOrElse(ByteVector.empty),
         tokenValidity.some

View File

@@ -0,0 +1,23 @@
+/*
+ * Copyright 2020 Eike K. & Contributors
+ *
+ * SPDX-License-Identifier: AGPL-3.0-or-later
+ */
+package docspell.backend.joex
+import docspell.common.AccountId
+import docspell.scheduler.FindJobOwner
+import docspell.store.Store
+import docspell.store.queries.QLogin
+/** Finds the job submitter account by using the group as collective and submitter as
+  * login.
+  */
+object FindJobOwnerAccount {
+  def apply[F[_]](store: Store[F]): FindJobOwner[F] =
+    FindJobOwner.of { job =>
+      val accountId = AccountId(job.group, job.submitter)
+      store.transact(QLogin.findAccount(accountId))
+    }
+}

View File

@@ -12,13 +12,13 @@ import cats.syntax.all._
 import docspell.backend.ops.AddonRunConfigError._
 import docspell.backend.ops.OAddons.{AddonRunConfigResult, AddonRunInsert}
-import docspell.common.Ident
+import docspell.common.CollectiveId
 import docspell.store.Store
 import docspell.store.records.RAddonArchive
 object AddonRunConfigValidate {
-  def apply[F[_]: Sync](store: Store[F], cid: Ident)(
+  def apply[F[_]: Sync](store: Store[F], cid: CollectiveId)(
       cfg: AddonRunInsert
   ): F[AddonRunConfigResult[AddonRunInsert]] = {
     val init: AddonRunConfigResult[Unit] = ().asRight
@@ -31,7 +31,7 @@ object AddonRunConfigValidate {
       .map(_.as(cfg))
   }
-  def checkTriggers[F[_]: Sync](store: Store[F], cid: Ident)(
+  def checkTriggers[F[_]: Sync](store: Store[F], cid: CollectiveId)(
       cfg: AddonRunInsert
   ): F[AddonRunConfigResult[Unit]] =
     for {

View File

@@ -16,7 +16,7 @@ import docspell.addons.{AddonMeta, RunnerType}
 import docspell.backend.Config
 import docspell.backend.ops.AddonValidationError._
 import docspell.backend.ops.OAddons.AddonValidationResult
-import docspell.common.{Ident, LenientUri, UrlReader}
+import docspell.common.{CollectiveId, LenientUri, UrlReader}
 import docspell.joexapi.model.AddonSupport
 import docspell.store.Store
 import docspell.store.records.RAddonArchive
@@ -29,7 +29,7 @@ final class AddonValidate[F[_]: Async](
   private[this] val logger = docspell.logging.getLogger[F]
   def fromUrl(
-      collective: Ident,
+      collective: CollectiveId,
       url: LenientUri,
       reader: UrlReader[F],
       localUrl: Option[LenientUri] = None,
@@ -47,7 +47,7 @@ final class AddonValidate[F[_]: Async](
     else archive(collective, reader(localUrl.getOrElse(url)).asRight, checkExisting)
   def archive(
-      collective: Ident,
+      collective: CollectiveId,
       addonData: Either[Path, Stream[F, Byte]],
       checkExisting: Boolean = true
   ): F[AddonValidationResult[AddonMeta]] =

View File

@@ -30,45 +30,46 @@ trait OAddons[F[_]] {
     * exists.
     */
   def registerAddon(
-      collective: Ident,
+      collective: CollectiveId,
       url: LenientUri,
       logger: Option[Logger[F]]
   ): F[AddonValidationResult[(RAddonArchive, AddonMeta)]]
   /** Refreshes an existing addon by downloading it again and updating metadata. */
   def refreshAddon(
-      collective: Ident,
+      collective: CollectiveId,
       addonId: Ident
   ): F[AddonValidationResult[(RAddonArchive, AddonMeta)]]
   /** Look into the addon at the given url and return its metadata. */
   def inspectAddon(
-      collective: Ident,
+      collective: CollectiveId,
       url: LenientUri
   ): F[AddonValidationResult[AddonMeta]]
   /** Deletes the addon if it exists. */
-  def deleteAddon(collective: Ident, addonId: Ident): F[Boolean]
+  def deleteAddon(collective: CollectiveId, addonId: Ident): F[Boolean]
-  def getAllAddons(collective: Ident): F[List[RAddonArchive]]
+  def getAllAddons(collective: CollectiveId): F[List[RAddonArchive]]
   /** Inserts or updates the addon run configuration. If it already exists (and the given
     * id is non empty), it will be completely replaced with the given one.
     */
   def upsertAddonRunConfig(
-      collective: Ident,
+      collective: CollectiveId,
       runConfig: AddonRunInsert
   ): F[AddonRunConfigResult[Ident]]
   /** Deletes this task from the database. */
-  def deleteAddonRunConfig(collective: Ident, runConfigId: Ident): F[Boolean]
+  def deleteAddonRunConfig(collective: CollectiveId, runConfigId: Ident): F[Boolean]
-  def getAllAddonRunConfigs(collective: Ident): F[List[AddonRunInfo]]
+  def getAllAddonRunConfigs(collective: CollectiveId): F[List[AddonRunInfo]]
   def runAddonForItem(
-      account: AccountId,
+      cid: CollectiveId,
       itemIds: NonEmptyList[Ident],
-      addonRunConfigIds: Set[Ident]
+      addonRunConfigIds: Set[Ident],
+      submitter: UserTaskScope
   ): F[Unit]
 }
@@ -141,7 +142,7 @@ object OAddons {
     private val zip = MimeType.zip.asString
     private val addonValidate = new AddonValidate[F](cfg, store, joex)
-    def getAllAddonRunConfigs(collective: Ident): F[List[AddonRunInfo]] =
+    def getAllAddonRunConfigs(collective: CollectiveId): F[List[AddonRunInfo]] =
       for {
         all <- store.transact(AddonRunConfigData.findAll(collective))
        runConfigIDs = all.map(_.runConfig.id).toSet
@@ -168,7 +169,7 @@ object OAddons {
       } yield result
     def upsertAddonRunConfig(
-        collective: Ident,
+        collective: CollectiveId,
         runConfig: AddonRunInsert
     ): F[AddonRunConfigResult[Ident]] = {
       val insertDataRaw = AddonRunConfigData(
@@ -246,7 +247,10 @@ object OAddons {
         .value
     }
-    def deleteAddonRunConfig(collective: Ident, runConfigId: Ident): F[Boolean] = {
+    def deleteAddonRunConfig(
+        collective: CollectiveId,
+        runConfigId: Ident
+    ): F[Boolean] = {
       val deleteRunConfig =
         (for {
           e <- OptionT(RAddonRunConfig.findById(collective, runConfigId))
@@ -264,20 +268,20 @@ object OAddons {
       } yield deleted
     }
-    def getAllAddons(collective: Ident): F[List[RAddonArchive]] =
+    def getAllAddons(collective: CollectiveId): F[List[RAddonArchive]] =
       store.transact(RAddonArchive.listAll(collective))
-    def deleteAddon(collective: Ident, addonId: Ident): F[Boolean] =
+    def deleteAddon(collective: CollectiveId, addonId: Ident): F[Boolean] =
       store.transact(RAddonArchive.deleteById(collective, addonId)).map(_ > 0)
     def inspectAddon(
-        collective: Ident,
+        collective: CollectiveId,
         url: LenientUri
     ): F[AddonValidationResult[AddonMeta]] =
       addonValidate.fromUrl(collective, url, urlReader, checkExisting = false)
     def registerAddon(
-        collective: Ident,
+        collective: CollectiveId,
         url: LenientUri,
         logger: Option[Logger[F]]
     ): F[AddonValidationResult[(RAddonArchive, AddonMeta)]] = {
@@ -294,7 +298,9 @@ object OAddons {
           .as(AddonValidationResult.failure[(RAddonArchive, AddonMeta)](error))
       }
-      log.info(s"Store addon file from '${url.asString} for ${collective.id}") *>
+      log.info(
+        s"Store addon file from '${url.asString} for collective ${collective.value}"
+      ) *>
         storeAddonFromUrl(collective, url).flatMapF { file =>
           val localUrl = FileUrlReader.url(file)
           for {
@@ -306,7 +312,7 @@ object OAddons {
     }
     def refreshAddon(
-        collective: Ident,
+        collective: CollectiveId,
         addonId: Ident
     ): F[AddonValidationResult[(RAddonArchive, AddonMeta)]] = {
       val findAddon = store
@@ -371,7 +377,7 @@ object OAddons {
     }
     private def insertAddon(
-        collective: Ident,
+        collective: CollectiveId,
         url: LenientUri,
         meta: AddonMeta,
         file: FileKey
@@ -392,7 +398,7 @@ object OAddons {
         .onError(_ => store.fileRepo.delete(file))
       } yield record
-    private def storeAddonFromUrl(collective: Ident, url: LenientUri) =
+    private def storeAddonFromUrl(collective: CollectiveId, url: LenientUri) =
       for {
         urlFile <- EitherT.pure(url.path.segments.lastOption)
         file <- EitherT(
@@ -412,15 +418,16 @@ object OAddons {
       } yield file
     def runAddonForItem(
-        account: AccountId,
+        cid: CollectiveId,
         itemIds: NonEmptyList[Ident],
-        addonRunConfigIds: Set[Ident]
+        addonRunConfigIds: Set[Ident],
+        submitter: UserTaskScope
     ): F[Unit] =
       for {
         jobs <- itemIds.traverse(id =>
           JobFactory.existingItemAddon(
-            ItemAddonTaskArgs(account.collective, id, addonRunConfigIds),
-            account
+            ItemAddonTaskArgs(cid, id, addonRunConfigIds),
+            submitter
          )
        )
        _ <- jobStore.insertAllIfNew(jobs.map(_.encode).toList)

View File

@@ -17,6 +17,7 @@ import docspell.common._
 import docspell.files.TikaMimetype
 import docspell.ftsclient.{FtsClient, TextData}
 import docspell.scheduler.JobStore
+import docspell.scheduler.usertask.UserTaskScope
 import docspell.store.Store
 import docspell.store.queries.QAttachment
 import docspell.store.records._
@@ -24,21 +25,22 @@ import docspell.store.records._
 trait OAttachment[F[_]] {
   def setExtractedText(
-      collective: Ident,
+      collective: CollectiveId,
       itemId: Ident,
       attachId: Ident,
       newText: F[String]
   ): F[Unit]
   def addOrReplacePdf(
-      collective: Ident,
+      collective: CollectiveId,
       attachId: Ident,
       pdfData: Stream[F, Byte],
-      regeneratePreview: Boolean
+      regeneratePreview: Boolean,
+      submitter: UserTaskScope
   ): F[Unit]
   def addOrReplacePreview(
-      collective: Ident,
+      collective: CollectiveId,
       attachId: Ident,
       imageData: Stream[F, Byte]
   ): F[Unit]
@@ -55,7 +57,7 @@ object OAttachment {
     private[this] val logger = docspell.logging.getLogger[F]
     def setExtractedText(
-        collective: Ident,
+        collective: CollectiveId,
         itemId: Ident,
         attachId: Ident,
         newText: F[String]
@@ -104,24 +106,22 @@ object OAttachment {
       } yield ()
     def addOrReplacePdf(
-        collective: Ident,
+        collective: CollectiveId,
         attachId: Ident,
         pdfData: Stream[F, Byte],
-        regeneratePreview: Boolean
+        regeneratePreview: Boolean,
+        submitter: UserTaskScope
     ): F[Unit] = {
       def generatePreview(ra: RAttachment): F[Unit] =
         JobFactory
-          .makePreview(MakePreviewArgs(ra.id, StoreMode.Replace), None)
+          .makePreview(MakePreviewArgs(ra.id, StoreMode.Replace), submitter)
           .map(_.encode)
           .flatMap(jobStore.insert) *>
           logger.info(s"Job submitted to re-generate preview from new pdf")
       def generatePageCount(ra: RAttachment): F[Unit] =
         JobFactory
-          .makePageCount(
-            MakePageCountArgs(ra.id),
-            AccountId(collective, DocspellSystem.user).some
-          )
+          .makePageCount(MakePageCountArgs(ra.id), submitter)
          .map(_.encode)
          .flatMap(jobStore.insert) *>
          logger.info(s"Job submitted to find page count from new pdf")
@@ -168,7 +168,7 @@ object OAttachment {
     }
     def addOrReplacePreview(
-        collective: Ident,
+        collective: CollectiveId,
         attachId: Ident,
         imageData: Stream[F, Byte]
     ): F[Unit] = {

View File

@ -11,29 +11,31 @@ import cats.data.OptionT
import cats.effect.{Async, Resource} import cats.effect.{Async, Resource}
import cats.implicits._ import cats.implicits._
import docspell.common.AccountId
import docspell.common._ import docspell.common._
import docspell.store.Store import docspell.store.Store
import docspell.store.records.RClientSettingsCollective import docspell.store.records.RClientSettingsCollective
import docspell.store.records.RClientSettingsUser import docspell.store.records.RClientSettingsUser
import docspell.store.records.RUser
import io.circe.Json import io.circe.Json
trait OClientSettings[F[_]] { trait OClientSettings[F[_]] {
def deleteUser(clientId: Ident, account: AccountId): F[Boolean] def deleteUser(clientId: Ident, userId: Ident): F[Boolean]
def saveUser(clientId: Ident, account: AccountId, data: Json): F[Unit] def saveUser(clientId: Ident, userId: Ident, data: Json): F[Unit]
def loadUser(clientId: Ident, account: AccountId): F[Option[RClientSettingsUser]] def loadUser(clientId: Ident, userId: Ident): F[Option[RClientSettingsUser]]
def deleteCollective(clientId: Ident, account: AccountId): F[Boolean] def deleteCollective(clientId: Ident, collectiveId: CollectiveId): F[Boolean]
def saveCollective(clientId: Ident, account: AccountId, data: Json): F[Unit] def saveCollective(clientId: Ident, collectiveId: CollectiveId, data: Json): F[Unit]
def loadCollective( def loadCollective(
clientId: Ident, clientId: Ident,
account: AccountId collectiveId: CollectiveId
): F[Option[RClientSettingsCollective]] ): F[Option[RClientSettingsCollective]]
def loadMerged(clientId: Ident, account: AccountId): F[Option[Json]] def loadMerged(
clientId: Ident,
collectiveId: CollectiveId,
userId: Ident
): F[Option[Json]]
} }
object OClientSettings { object OClientSettings {
@ -41,22 +43,18 @@ object OClientSettings {
Resource.pure[F, OClientSettings[F]](new OClientSettings[F] { Resource.pure[F, OClientSettings[F]](new OClientSettings[F] {
val log = docspell.logging.getLogger[F] val log = docspell.logging.getLogger[F]
private def getUserId(account: AccountId): OptionT[F, Ident] = def deleteCollective(clientId: Ident, collectiveId: CollectiveId): F[Boolean] =
OptionT(store.transact(RUser.findByAccount(account))).map(_.uid)
def deleteCollective(clientId: Ident, account: AccountId): F[Boolean] =
store store
.transact(RClientSettingsCollective.delete(clientId, account.collective)) .transact(RClientSettingsCollective.delete(clientId, collectiveId))
.map(_ > 0) .map(_ > 0)
def deleteUser(clientId: Ident, account: AccountId): F[Boolean] = def deleteUser(clientId: Ident, userId: Ident): F[Boolean] =
(for { (for {
_ <- OptionT.liftF( _ <- OptionT.liftF(
log.debug( log.debug(
s"Deleting client settings for client ${clientId.id} and account $account" s"Deleting client settings for client ${clientId.id} and user ${userId.id}"
) )
) )
userId <- getUserId(account)
n <- OptionT.liftF( n <- OptionT.liftF(
store.transact( store.transact(
RClientSettingsUser.delete(clientId, userId) RClientSettingsUser.delete(clientId, userId)
@ -64,24 +62,27 @@ object OClientSettings {
) )
} yield n > 0).getOrElse(false) } yield n > 0).getOrElse(false)
def saveCollective(clientId: Ident, account: AccountId, data: Json): F[Unit] = def saveCollective(
clientId: Ident,
collectiveId: CollectiveId,
data: Json
): F[Unit] =
for { for {
n <- store.transact( n <- store.transact(
RClientSettingsCollective.upsert(clientId, account.collective, data) RClientSettingsCollective.upsert(clientId, collectiveId, data)
) )
_ <- _ <-
if (n <= 0) Async[F].raiseError(new IllegalStateException("No rows updated!")) if (n <= 0) Async[F].raiseError(new IllegalStateException("No rows updated!"))
else ().pure[F] else ().pure[F]
} yield () } yield ()
def saveUser(clientId: Ident, account: AccountId, data: Json): F[Unit] = def saveUser(clientId: Ident, userId: Ident, data: Json): F[Unit] =
(for { (for {
_ <- OptionT.liftF( _ <- OptionT.liftF(
log.debug( log.debug(
s"Storing client settings for client ${clientId.id} and account $account" s"Storing client settings for client ${clientId.id} and user ${userId.id}"
) )
) )
userId <- getUserId(account)
n <- OptionT.liftF( n <- OptionT.liftF(
store.transact(RClientSettingsUser.upsert(clientId, userId, data)) store.transact(RClientSettingsUser.upsert(clientId, userId, data))
) )
@ -93,25 +94,24 @@ object OClientSettings {
def loadCollective( def loadCollective(
clientId: Ident, clientId: Ident,
account: AccountId collectiveId: CollectiveId
): F[Option[RClientSettingsCollective]] = ): F[Option[RClientSettingsCollective]] =
store.transact(RClientSettingsCollective.find(clientId, account.collective)) store.transact(RClientSettingsCollective.find(clientId, collectiveId))
def loadUser(clientId: Ident, account: AccountId): F[Option[RClientSettingsUser]] = def loadUser(clientId: Ident, userId: Ident): F[Option[RClientSettingsUser]] =
(for { (for {
_ <- OptionT.liftF( _ <- OptionT.liftF(
log.debug( log.debug(
s"Loading client settings for client ${clientId.id} and account $account" s"Loading client settings for client ${clientId.id} and user ${userId.id}"
) )
) )
userId <- getUserId(account)
data <- OptionT(store.transact(RClientSettingsUser.find(clientId, userId))) data <- OptionT(store.transact(RClientSettingsUser.find(clientId, userId)))
} yield data).value } yield data).value
def loadMerged(clientId: Ident, account: AccountId) = def loadMerged(clientId: Ident, collectiveId: CollectiveId, userId: Ident) =
for { for {
collData <- loadCollective(clientId, account) collData <- loadCollective(clientId, collectiveId)
userData <- loadUser(clientId, account) userData <- loadUser(clientId, userId)
mergedData = collData.map(_.settingsData) |+| userData.map(_.settingsData) mergedData = collData.map(_.settingsData) |+| userData.map(_.settingsData)
} yield mergedData } yield mergedData
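
A minimal sketch of how callers use the refactored OClientSettings API: the collective id and user id are passed directly instead of resolving an AccountId through RUser. The helper name `effectiveSettings`, the Functor constraint and the empty-object fallback are illustrative assumptions, not part of this change; only the loadMerged signature is taken from the diff above.

import cats.Functor
import cats.syntax.functor._
import docspell.common.{CollectiveId, Ident}
import io.circe.Json

// Illustrative helper (not in this diff): merged client settings for a user,
// falling back to an empty JSON object when nothing is stored.
def effectiveSettings[F[_]: Functor](
    ops: OClientSettings[F],
    clientId: Ident,
    collectiveId: CollectiveId,
    userId: Ident
): F[Json] =
  ops.loadMerged(clientId, collectiveId, userId).map(_.getOrElse(Json.obj()))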

View File

@ -6,6 +6,7 @@
package docspell.backend.ops package docspell.backend.ops
import cats.data.OptionT
import cats.effect.{Async, Resource} import cats.effect.{Async, Resource}
import cats.implicits._ import cats.implicits._
import fs2.Stream import fs2.Stream
@ -27,28 +28,32 @@ trait OCollective[F[_]] {
def find(name: Ident): F[Option[RCollective]] def find(name: Ident): F[Option[RCollective]]
def updateSettings(collective: Ident, settings: OCollective.Settings): F[AddResult] def updateSettings(
collective: CollectiveId,
settings: OCollective.Settings
): F[AddResult]
def findSettings(collective: Ident): F[Option[OCollective.Settings]] def findSettings(collective: CollectiveId): F[Option[OCollective.Settings]]
def listUser(collective: Ident): F[Vector[RUser]] def listUser(collective: CollectiveId): F[Vector[RUser]]
def add(s: RUser): F[AddResult] def add(s: RUser): F[AddResult]
def update(s: RUser): F[AddResult] def update(s: RUser): F[AddResult]
/** Deletes the user and all its data. */ /** Deletes the user and all its data. */
def deleteUser(login: Ident, collective: Ident): F[UpdateResult] def deleteUser(userId: Ident): F[UpdateResult]
/** Returns an excerpt of what would be deleted when the user is deleted. */ /** Returns an excerpt of what would be deleted when the user is deleted. */
def getDeleteUserData(accountId: AccountId): F[DeleteUserData] def getDeleteUserData(cid: CollectiveId, userId: Ident): F[DeleteUserData]
def insights(collective: Ident): F[InsightData] def insights(collective: CollectiveId): F[InsightData]
def tagCloud(collective: Ident): F[List[TagCount]] def tagCloud(collective: CollectiveId): F[List[TagCount]]
def changePassword( def changePassword(
accountId: AccountId, collectiveId: CollectiveId,
userId: Ident,
current: Password, current: Password,
newPass: Password newPass: Password
): F[PassChangeResult] ): F[PassChangeResult]
@ -56,20 +61,21 @@ trait OCollective[F[_]] {
def resetPassword(accountId: AccountId): F[PassResetResult] def resetPassword(accountId: AccountId): F[PassResetResult]
def getContacts( def getContacts(
collective: Ident, collective: CollectiveId,
query: Option[String], query: Option[String],
kind: Option[ContactKind] kind: Option[ContactKind]
): Stream[F, RContact] ): Stream[F, RContact]
def findEnabledSource(sourceId: Ident): F[Option[RSource]] def findEnabledSource(sourceId: Ident): F[Option[RSource]]
def addPassword(collective: Ident, pw: Password): F[Unit] def addPassword(collective: CollectiveId, pw: Password): F[Unit]
def getPasswords(collective: Ident): F[List[RCollectivePassword]] def getPasswords(collective: CollectiveId): F[List[RCollectivePassword]]
/** Removes a password from the list given the id of `RCollectivePassword` */
def removePassword(id: Ident): F[Unit] def removePassword(id: Ident): F[Unit]
def startLearnClassifier(collective: Ident): F[Unit] def startLearnClassifier(collective: CollectiveId): F[Unit]
def startEmptyTrash(args: EmptyTrashArgs): F[Unit] def startEmptyTrash(args: EmptyTrashArgs): F[Unit]
@ -78,7 +84,8 @@ trait OCollective[F[_]] {
*/ */
def generatePreviews( def generatePreviews(
storeMode: MakePreviewArgs.StoreMode, storeMode: MakePreviewArgs.StoreMode,
account: AccountId collectiveId: CollectiveId,
submitter: UserTaskScope
): F[UpdateResult] ): F[UpdateResult]
} }
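
A minimal sketch against the reworked OCollective trait: password changes now address the user by collective id and user id rather than an AccountId. The wrapper name is illustrative; only the changePassword signature is taken from this diff.

import docspell.common.{CollectiveId, Ident, Password}

// Illustrative wrapper (not in this diff); delegates to the new signature.
def changeOwnPassword[F[_]](
    oc: OCollective[F],
    collectiveId: CollectiveId,
    userId: Ident,
    current: Password,
    newPass: Password
) =
  oc.changePassword(collectiveId, userId, current, newPass)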
@ -137,26 +144,32 @@ object OCollective {
): Resource[F, OCollective[F]] = ): Resource[F, OCollective[F]] =
Resource.pure[F, OCollective[F]](new OCollective[F] { Resource.pure[F, OCollective[F]](new OCollective[F] {
def find(name: Ident): F[Option[RCollective]] = def find(name: Ident): F[Option[RCollective]] =
store.transact(RCollective.findById(name)) store.transact(RCollective.findByName(name))
def updateSettings(collective: Ident, sett: Settings): F[AddResult] = def updateSettings(
collectiveId: CollectiveId,
sett: Settings
): F[AddResult] =
store store
.transact(RCollective.updateSettings(collective, sett)) .transact(RCollective.updateSettings(collectiveId, sett))
.attempt .attempt
.map(AddResult.fromUpdate) .map(AddResult.fromUpdate)
.flatMap(res => .flatMap(res =>
updateLearnClassifierTask(collective, sett) *> updateEmptyTrashTask( updateLearnClassifierTask(collectiveId, sett) *> updateEmptyTrashTask(
collective, collectiveId,
sett sett
) *> res.pure[F] ) *> res.pure[F]
) )
private def updateLearnClassifierTask(coll: Ident, sett: Settings): F[Unit] = private def updateLearnClassifierTask(
cid: CollectiveId,
sett: Settings
): F[Unit] =
for { for {
id <- Ident.randomId[F] id <- Ident.randomId[F]
on = sett.classifier.exists(_.enabled) on = sett.classifier.exists(_.enabled)
timer = sett.classifier.map(_.schedule).getOrElse(CalEvent.unsafe("")) timer = sett.classifier.map(_.schedule).getOrElse(CalEvent.unsafe(""))
args = LearnClassifierArgs(coll) args = LearnClassifierArgs(cid)
ut = UserTask( ut = UserTask(
id, id,
LearnClassifierArgs.taskName, LearnClassifierArgs.taskName,
@ -165,36 +178,41 @@ object OCollective {
None, None,
args args
) )
_ <- uts.updateOneTask(UserTaskScope(coll), args.makeSubject.some, ut) _ <- uts.updateOneTask(UserTaskScope.collective(cid), args.makeSubject.some, ut)
_ <- joex.notifyAllNodes _ <- joex.notifyAllNodes
} yield () } yield ()
private def updateEmptyTrashTask(coll: Ident, sett: Settings): F[Unit] = private def updateEmptyTrashTask(
cid: CollectiveId,
sett: Settings
): F[Unit] =
for { for {
id <- Ident.randomId[F] id <- Ident.randomId[F]
settings = sett.emptyTrash.getOrElse(EmptyTrash.default) settings = sett.emptyTrash.getOrElse(EmptyTrash.default)
args = EmptyTrashArgs(coll, settings.minAge) args = EmptyTrashArgs(cid, settings.minAge)
ut = UserTask(id, EmptyTrashArgs.taskName, true, settings.schedule, None, args) ut = UserTask(id, EmptyTrashArgs.taskName, true, settings.schedule, None, args)
_ <- uts.updateOneTask(UserTaskScope(coll), args.makeSubject.some, ut) _ <- uts.updateOneTask(UserTaskScope.collective(cid), args.makeSubject.some, ut)
_ <- joex.notifyAllNodes _ <- joex.notifyAllNodes
} yield () } yield ()
def addPassword(collective: Ident, pw: Password): F[Unit] = def addPassword(collective: CollectiveId, pw: Password): F[Unit] =
for { for {
cpass <- RCollectivePassword.createNew[F](collective, pw) cpass <- RCollectivePassword.createNew[F](collective, pw)
_ <- store.transact(RCollectivePassword.upsert(cpass)) _ <- store.transact(RCollectivePassword.upsert(cpass))
} yield () } yield ()
def getPasswords(collective: Ident): F[List[RCollectivePassword]] = def getPasswords(collective: CollectiveId): F[List[RCollectivePassword]] =
store.transact(RCollectivePassword.findAll(collective)) store.transact(RCollectivePassword.findAll(collective))
def removePassword(id: Ident): F[Unit] = def removePassword(id: Ident): F[Unit] =
store.transact(RCollectivePassword.deleteById(id)).map(_ => ()) store.transact(RCollectivePassword.deleteById(id)).map(_ => ())
def startLearnClassifier(collective: Ident): F[Unit] = def startLearnClassifier(
collectiveId: CollectiveId
): F[Unit] =
for { for {
id <- Ident.randomId[F] id <- Ident.randomId[F]
args = LearnClassifierArgs(collective) args = LearnClassifierArgs(collectiveId)
ut = UserTask( ut = UserTask(
id, id,
LearnClassifierArgs.taskName, LearnClassifierArgs.taskName,
@ -204,7 +222,11 @@ object OCollective {
args args
) )
_ <- uts _ <- uts
.executeNow(UserTaskScope(collective), args.makeSubject.some, ut) .executeNow(
UserTaskScope.collective(args.collectiveId),
args.makeSubject.some,
ut
)
} yield () } yield ()
def startEmptyTrash(args: EmptyTrashArgs): F[Unit] = def startEmptyTrash(args: EmptyTrashArgs): F[Unit] =
@ -219,13 +241,17 @@ object OCollective {
args args
) )
_ <- uts _ <- uts
.executeNow(UserTaskScope(args.collective), args.makeSubject.some, ut) .executeNow(
UserTaskScope.collective(args.collective),
args.makeSubject.some,
ut
)
} yield () } yield ()
def findSettings(collective: Ident): F[Option[OCollective.Settings]] = def findSettings(collective: CollectiveId): F[Option[OCollective.Settings]] =
store.transact(RCollective.getSettings(collective)) store.transact(RCollective.getSettings(collective))
def listUser(collective: Ident): F[Vector[RUser]] = def listUser(collective: CollectiveId): F[Vector[RUser]] =
store.transact(RUser.findAll(collective, _.login)) store.transact(RUser.findAll(collective, _.login))
def add(s: RUser): F[AddResult] = def add(s: RUser): F[AddResult] =
@ -240,47 +266,48 @@ object OCollective {
def update(s: RUser): F[AddResult] = def update(s: RUser): F[AddResult] =
store.add(RUser.update(s), RUser.exists(s.login)) store.add(RUser.update(s), RUser.exists(s.login))
def getDeleteUserData(accountId: AccountId): F[DeleteUserData] = def getDeleteUserData(cid: CollectiveId, userId: Ident): F[DeleteUserData] =
store.transact(QUser.getUserData(accountId)) store.transact(QUser.getUserData(cid, userId))
def deleteUser(login: Ident, collective: Ident): F[UpdateResult] = def deleteUser(userId: Ident): F[UpdateResult] =
UpdateResult.fromUpdate( UpdateResult.fromUpdate(
store.transact(QUser.deleteUserAndData(AccountId(collective, login))) store.transact(QUser.deleteUserAndData(userId))
) )
def insights(collective: Ident): F[InsightData] = def insights(collective: CollectiveId): F[InsightData] =
store.transact(QCollective.getInsights(collective)) store.transact(QCollective.getInsights(collective))
def tagCloud(collective: Ident): F[List[TagCount]] = def tagCloud(collective: CollectiveId): F[List[TagCount]] =
store.transact(QCollective.tagCloud(collective)) store.transact(QCollective.tagCloud(collective))
def resetPassword(accountId: AccountId): F[PassResetResult] = def resetPassword(accountId: AccountId): F[PassResetResult] =
for { (for {
newPass <- Password.generate[F] user <- OptionT(store.transact(RUser.findByAccount(accountId)))
optUser <- store.transact(RUser.findByAccount(accountId)) newPass <- OptionT.liftF(Password.generate[F])
n <- store.transact(
RUser.updatePassword(accountId, PasswordCrypt.crypt(newPass)) doUpdate = store.transact(
RUser.updatePassword(user.cid, user.uid, PasswordCrypt.crypt(newPass))
) )
res = res <-
if (optUser.exists(_.source != AccountSource.Local)) if (user.source != AccountSource.Local)
PassResetResult.userNotLocal OptionT.pure[F](PassResetResult.userNotLocal)
else if (n <= 0) PassResetResult.notFound else OptionT.liftF(doUpdate.as(PassResetResult.success(newPass)))
else PassResetResult.success(newPass) } yield res).getOrElse(PassResetResult.notFound)
} yield res
def changePassword( def changePassword(
accountId: AccountId, collectiveId: CollectiveId,
userId: Ident,
current: Password, current: Password,
newPass: Password newPass: Password
): F[PassChangeResult] = { ): F[PassChangeResult] = {
val q = for { val q = for {
optUser <- RUser.findByAccount(accountId) optUser <- RUser.findById(userId, collectiveId.some)
check = optUser.map(_.password).map(p => PasswordCrypt.check(current, p)) check = optUser.map(_.password).map(p => PasswordCrypt.check(current, p))
n <- n <-
check check
.filter(identity) .filter(identity)
.traverse(_ => .traverse(_ =>
RUser.updatePassword(accountId, PasswordCrypt.crypt(newPass)) RUser.updatePassword(collectiveId, userId, PasswordCrypt.crypt(newPass))
) )
res = check match { res = check match {
case Some(true) => case Some(true) =>
@ -299,7 +326,7 @@ object OCollective {
} }
def getContacts( def getContacts(
collective: Ident, collective: CollectiveId,
query: Option[String], query: Option[String],
kind: Option[ContactKind] kind: Option[ContactKind]
): Stream[F, RContact] = ): Stream[F, RContact] =
@ -310,12 +337,13 @@ object OCollective {
def generatePreviews( def generatePreviews(
storeMode: MakePreviewArgs.StoreMode, storeMode: MakePreviewArgs.StoreMode,
account: AccountId collectiveId: CollectiveId,
submitter: UserTaskScope
): F[UpdateResult] = ): F[UpdateResult] =
for { for {
job <- JobFactory.allPreviews[F]( job <- JobFactory.allPreviews[F](
AllPreviewsArgs(Some(account.collective), storeMode), AllPreviewsArgs(Some(collectiveId), storeMode),
Some(account.user) submitter
) )
_ <- jobStore.insertIfNew(job.encode) _ <- jobStore.insertIfNew(job.encode)
} yield UpdateResult.success } yield UpdateResult.success
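
A short sketch of submitting preview generation under the new signature, where the background job is attributed via a UserTaskScope instead of an AccountId. It assumes an OCollective instance, a chosen store mode and that MakePreviewArgs lives in docspell.common; the helper name is illustrative.

import docspell.common.{CollectiveId, MakePreviewArgs}
import docspell.scheduler.usertask.UserTaskScope
import docspell.store.UpdateResult

// Illustrative only: regenerate previews for a whole collective, submitted
// under the collective scope rather than a concrete user.
def regenerateAllPreviews[F[_]](
    oc: OCollective[F],
    cid: CollectiveId,
    storeMode: MakePreviewArgs.StoreMode
): F[UpdateResult] =
  oc.generatePreviews(storeMode, cid, UserTaskScope.collective(cid))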

View File

@ -36,13 +36,13 @@ trait OCustomFields[F[_]] {
/** Find all fields using an optional query on the name and label */ /** Find all fields using an optional query on the name and label */
def findAll( def findAll(
coll: Ident, coll: CollectiveId,
nameQuery: Option[String], nameQuery: Option[String],
order: CustomFieldOrder order: CustomFieldOrder
): F[Vector[CustomFieldData]] ): F[Vector[CustomFieldData]]
/** Find one field by its id */ /** Find one field by its id */
def findById(coll: Ident, fieldId: Ident): F[Option[CustomFieldData]] def findById(coll: CollectiveId, fieldId: Ident): F[Option[CustomFieldData]]
/** Create a new non-existing field. */ /** Create a new non-existing field. */
def create(field: NewCustomField): F[AddResult] def create(field: NewCustomField): F[AddResult]
@ -51,7 +51,7 @@ trait OCustomFields[F[_]] {
def change(field: RCustomField): F[UpdateResult] def change(field: RCustomField): F[UpdateResult]
/** Deletes the field by name or id. */ /** Deletes the field by name or id. */
def delete(coll: Ident, fieldIdOrName: Ident): F[UpdateResult] def delete(coll: CollectiveId, fieldIdOrName: Ident): F[UpdateResult]
/** Sets a value given a field and an item. Existing values are overwritten. */ /** Sets a value given a field and an item. Existing values are overwritten. */
def setValue(item: Ident, value: SetValue): F[AttachedEvent[SetValueResult]] def setValue(item: Ident, value: SetValue): F[AttachedEvent[SetValueResult]]
@ -80,13 +80,13 @@ object OCustomFields {
name: Ident, name: Ident,
label: Option[String], label: Option[String],
ftype: CustomFieldType, ftype: CustomFieldType,
cid: Ident cid: CollectiveId
) )
case class SetValue( case class SetValue(
field: Ident, field: Ident,
value: String, value: String,
collective: Ident collective: CollectiveId
) )
sealed trait SetValueResult sealed trait SetValueResult
@ -106,7 +106,7 @@ object OCustomFields {
case class RemoveValue( case class RemoveValue(
field: Ident, field: Ident,
item: Nel[Ident], item: Nel[Ident],
collective: Ident collective: CollectiveId
) )
sealed trait CustomFieldOrder sealed trait CustomFieldOrder
@ -158,7 +158,7 @@ object OCustomFields {
store.transact(QCustomField.findAllValues(itemIds)) store.transact(QCustomField.findAllValues(itemIds))
def findAll( def findAll(
coll: Ident, coll: CollectiveId,
nameQuery: Option[String], nameQuery: Option[String],
order: CustomFieldOrder order: CustomFieldOrder
): F[Vector[CustomFieldData]] = ): F[Vector[CustomFieldData]] =
@ -170,7 +170,7 @@ object OCustomFields {
) )
) )
def findById(coll: Ident, field: Ident): F[Option[CustomFieldData]] = def findById(coll: CollectiveId, field: Ident): F[Option[CustomFieldData]] =
store.transact(QCustomField.findById(field, coll)) store.transact(QCustomField.findById(field, coll))
def create(field: NewCustomField): F[AddResult] = { def create(field: NewCustomField): F[AddResult] = {
@ -188,7 +188,7 @@ object OCustomFields {
def change(field: RCustomField): F[UpdateResult] = def change(field: RCustomField): F[UpdateResult] =
UpdateResult.fromUpdate(store.transact(RCustomField.update(field))) UpdateResult.fromUpdate(store.transact(RCustomField.update(field)))
def delete(coll: Ident, fieldIdOrName: Ident): F[UpdateResult] = { def delete(coll: CollectiveId, fieldIdOrName: Ident): F[UpdateResult] = {
val update = val update =
for { for {
field <- OptionT(RCustomField.findByIdOrName(fieldIdOrName, coll)) field <- OptionT(RCustomField.findByIdOrName(fieldIdOrName, coll))
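
A minimal sketch of the custom-field API after the change: the collective id travels inside SetValue rather than being derived from an AccountId. Only the SetValue constructor and the setValue signature are taken from this diff; the helper is illustrative.

import docspell.backend.ops.OCustomFields.SetValue
import docspell.common.{CollectiveId, Ident}

// Illustrative only: set a custom field value on an item of a collective.
def setField[F[_]](
    fields: OCustomFields[F],
    cid: CollectiveId,
    item: Ident,
    field: Ident,
    value: String
) =
  fields.setValue(item, SetValue(field, value, cid))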

View File

@ -21,6 +21,7 @@ import docspell.common._
import docspell.query.ItemQuery.Expr.ValidItemStates import docspell.query.ItemQuery.Expr.ValidItemStates
import docspell.query.{ItemQuery, ItemQueryParser} import docspell.query.{ItemQuery, ItemQueryParser}
import docspell.scheduler.JobStore import docspell.scheduler.JobStore
import docspell.scheduler.usertask.UserTaskScope
import docspell.store.Store import docspell.store.Store
import docspell.store.file.FileMetadata import docspell.store.file.FileMetadata
import docspell.store.queries.{QItem, Query} import docspell.store.queries.{QItem, Query}
@ -34,18 +35,18 @@ trait ODownloadAll[F[_]] {
/** Calculates what kind of zip file would be created and checks the server thresholds. /** Calculates what kind of zip file would be created and checks the server thresholds.
*/ */
def getSummary(account: AccountId, req: DownloadRequest): F[DownloadSummary] def getSummary(account: AccountInfo, req: DownloadRequest): F[DownloadSummary]
/** Same as `getSummary` but also submits the job to really create the zip file if /** Same as `getSummary` but also submits the job to really create the zip file if
* allowed and necessary. * allowed and necessary.
*/ */
def submit(accountId: AccountId, req: DownloadRequest): F[DownloadSummary] def submit(accountId: AccountInfo, req: DownloadRequest): F[DownloadSummary]
/** Given the id from the summary, cancels a running job. */ /** Given the id from the summary, cancels a running job. */
def cancelDownload(accountId: AccountId, id: Ident): F[OJob.JobCancelResult] def cancelDownload(cid: CollectiveId, id: Ident): F[OJob.JobCancelResult]
/** Returns the file if it is present, given a summary id. */ /** Returns the file if it is present, given a summary id. */
def getFile(collective: Ident, id: Ident): F[Option[DownloadAllFile[F]]] def getFile(collective: CollectiveId, id: Ident): F[Option[DownloadAllFile[F]]]
/** Deletes a download archive given its id. */ /** Deletes a download archive given its id. */
def deleteFile(id: Ident): F[Unit] def deleteFile(id: Ident): F[Unit]
@ -65,7 +66,7 @@ object ODownloadAll {
private[this] val logger = docspell.logging.getLogger[F] private[this] val logger = docspell.logging.getLogger[F]
def getSummary( def getSummary(
account: AccountId, account: AccountInfo,
req: DownloadRequest req: DownloadRequest
): F[DownloadSummary] = { ): F[DownloadSummary] = {
val query = req.toQuery(account) val query = req.toQuery(account)
@ -83,16 +84,16 @@ object ODownloadAll {
} }
def submit( def submit(
accountId: AccountId, account: AccountInfo,
req: DownloadRequest req: DownloadRequest
): F[DownloadSummary] = for { ): F[DownloadSummary] = for {
_ <- logger.info(s"Download all request: $req") _ <- logger.info(s"Download all request: $req")
summary <- getSummary(accountId, req) summary <- getSummary(account, req)
args = DownloadZipArgs(accountId, req) args = DownloadZipArgs(account, req)
_ <- OptionT _ <- OptionT
.whenF(summary.state == DownloadState.NotPresent) { .whenF(summary.state == DownloadState.NotPresent) {
JobFactory JobFactory
.downloadZip(args, summary.id, accountId) .downloadZip(args, summary.id, UserTaskScope(account))
.flatMap(job => .flatMap(job =>
logger.info(s"Submitting download all job: $job") *> jobStore logger.info(s"Submitting download all job: $job") *> jobStore
.insertIfNew(job.encode) .insertIfNew(job.encode)
@ -124,7 +125,7 @@ object ODownloadAll {
else DownloadState.NotPresent else DownloadState.NotPresent
} yield state } yield state
def getFile(collective: Ident, id: Ident) = def getFile(collective: CollectiveId, id: Ident) =
OptionT(store.transact(RDownloadQuery.findById(id))) OptionT(store.transact(RDownloadQuery.findById(id)))
.map(_._2) .map(_._2)
.map(md => .map(md =>
@ -158,10 +159,10 @@ object ODownloadAll {
_ <- store.fileRepo.delete(fkey) _ <- store.fileRepo.delete(fkey)
} yield () } yield ()
def cancelDownload(accountId: AccountId, id: Ident) = def cancelDownload(cid: CollectiveId, id: Ident) =
OptionT(store.transact(RDownloadQuery.findById(id))) OptionT(store.transact(RDownloadQuery.findById(id)))
.flatMap(t => OptionT(store.transact(RJob.findNonFinalByTracker(t._1.id)))) .flatMap(t => OptionT(store.transact(RJob.findNonFinalByTracker(t._1.id))))
.semiflatMap(job => jobs.cancelJob(job.id, accountId.collective)) .semiflatMap(job => jobs.cancelJob(job.id, UserTaskScope.collective(cid)))
.getOrElse(JobCancelResult.jobNotFound) .getOrElse(JobCancelResult.jobNotFound)
} }
@ -173,9 +174,9 @@ object ODownloadAll {
maxFiles: Int, maxFiles: Int,
maxSize: ByteSize maxSize: ByteSize
) { ) {
def toQuery(accountId: AccountId): Query = def toQuery(account: AccountInfo): Query =
Query Query
.all(accountId) .all(account)
.withFix(_.andQuery(ValidItemStates)) .withFix(_.andQuery(ValidItemStates))
.withCond(_ => Query.QueryExpr(query.expr)) .withCond(_ => Query.QueryExpr(query.expr))
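
A small sketch of the reworked ODownloadAll API: submission uses the full AccountInfo, while cancellation only needs the collective id, which AccountInfo carries (its collectiveId accessor is used elsewhere in this diff). The helper name is illustrative.

import docspell.common.{AccountInfo, Ident}

// Illustrative only: cancel a running "download all" job for the caller's
// collective, given the summary id returned by submit/getSummary.
def cancelZip[F[_]](dl: ODownloadAll[F], account: AccountInfo, summaryId: Ident) =
  dl.cancelDownload(account.collectiveId, summaryId)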

View File

@ -10,25 +10,25 @@ import cats.data.NonEmptyList
import cats.effect.{Async, Resource} import cats.effect.{Async, Resource}
import cats.implicits._ import cats.implicits._
import docspell.common.{AccountId, Ident} import docspell.common._
import docspell.store.records.{REquipment, RItem} import docspell.store.records.{REquipment, RItem}
import docspell.store.{AddResult, Store} import docspell.store.{AddResult, Store}
trait OEquipment[F[_]] { trait OEquipment[F[_]] {
def findAll( def findAll(
account: AccountId, collectiveId: CollectiveId,
nameQuery: Option[String], nameQuery: Option[String],
order: OEquipment.EquipmentOrder order: OEquipment.EquipmentOrder
): F[Vector[REquipment]] ): F[Vector[REquipment]]
def find(account: AccountId, id: Ident): F[Option[REquipment]] def find(collectiveId: CollectiveId, id: Ident): F[Option[REquipment]]
def add(s: REquipment): F[AddResult] def add(s: REquipment): F[AddResult]
def update(s: REquipment): F[AddResult] def update(s: REquipment): F[AddResult]
def delete(id: Ident, collective: Ident): F[AddResult] def delete(id: Ident, collective: CollectiveId): F[AddResult]
} }
object OEquipment { object OEquipment {
@ -58,16 +58,16 @@ object OEquipment {
def apply[F[_]: Async](store: Store[F]): Resource[F, OEquipment[F]] = def apply[F[_]: Async](store: Store[F]): Resource[F, OEquipment[F]] =
Resource.pure[F, OEquipment[F]](new OEquipment[F] { Resource.pure[F, OEquipment[F]](new OEquipment[F] {
def findAll( def findAll(
account: AccountId, collective: CollectiveId,
nameQuery: Option[String], nameQuery: Option[String],
order: EquipmentOrder order: EquipmentOrder
): F[Vector[REquipment]] = ): F[Vector[REquipment]] =
store.transact( store.transact(
REquipment.findAll(account.collective, nameQuery, EquipmentOrder(order)) REquipment.findAll(collective, nameQuery, EquipmentOrder(order))
) )
def find(account: AccountId, id: Ident): F[Option[REquipment]] = def find(cid: CollectiveId, id: Ident): F[Option[REquipment]] =
store.transact(REquipment.findById(id)).map(_.filter(_.cid == account.collective)) store.transact(REquipment.findById(id)).map(_.filter(_.cid == cid))
def add(e: REquipment): F[AddResult] = { def add(e: REquipment): F[AddResult] = {
def insert = REquipment.insert(e) def insert = REquipment.insert(e)
@ -85,7 +85,7 @@ object OEquipment {
store.add(insert, exists).map(_.fold(identity, _.withMsg(msg), identity)) store.add(insert, exists).map(_.fold(identity, _.withMsg(msg), identity))
} }
def delete(id: Ident, collective: Ident): F[AddResult] = { def delete(id: Ident, collective: CollectiveId): F[AddResult] = {
val io = for { val io = for {
n0 <- RItem.removeConcEquip(collective, id) n0 <- RItem.removeConcEquip(collective, id)
n1 <- REquipment.delete(id, collective) n1 <- REquipment.delete(id, collective)
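
A sketch of the OEquipment API after the change, where lookups are scoped by CollectiveId directly. The fallback-to-listing behavior and the helper name are illustrative assumptions; the find and findAll signatures come from this diff.

import cats.Monad
import cats.syntax.all._
import docspell.common.{CollectiveId, Ident}
import docspell.store.records.REquipment

// Illustrative only: return the single equipment if it belongs to the
// collective, otherwise list all equipment using the given order.
def findOrList[F[_]: Monad](
    equip: OEquipment[F],
    cid: CollectiveId,
    id: Ident,
    order: OEquipment.EquipmentOrder
): F[Vector[REquipment]] =
  equip.find(cid, id).flatMap {
    case Some(e) => Vector(e).pure[F]
    case None    => equip.findAll(cid, None, order)
  }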

View File

@ -8,6 +8,7 @@ package docspell.backend.ops
import cats.data.{NonEmptyList => Nel} import cats.data.{NonEmptyList => Nel}
import cats.effect._ import cats.effect._
import cats.syntax.option._
import docspell.common._ import docspell.common._
import docspell.store.queries.QFolder import docspell.store.queries.QFolder
@ -17,38 +18,43 @@ import docspell.store.{AddResult, Store}
trait OFolder[F[_]] { trait OFolder[F[_]] {
def findAll( def findAll(
account: AccountId, collectiveId: CollectiveId,
userId: Ident,
ownerLogin: Option[Ident], ownerLogin: Option[Ident],
query: Option[String], query: Option[String],
order: OFolder.FolderOrder order: OFolder.FolderOrder
): F[Vector[OFolder.FolderItem]] ): F[Vector[OFolder.FolderItem]]
def findById(id: Ident, account: AccountId): F[Option[OFolder.FolderDetail]] def findById(
id: Ident,
collectiveId: CollectiveId,
userId: Ident
): F[Option[OFolder.FolderDetail]]
/** Adds a new folder. If `login` is non-empty, the `folder.user` property is ignored /** Adds a new folder. If `login` is non-empty, the `folder.owner` property is ignored
* and the user-id is determined by the given login name. * and its value is determined by the given login name.
*/ */
def add(folder: RFolder, login: Option[Ident]): F[AddResult] def add(folder: RFolder, login: Option[Ident]): F[AddResult]
def changeName( def changeName(
folder: Ident, folder: Ident,
account: AccountId, userId: Ident,
name: String name: String
): F[OFolder.FolderChangeResult] ): F[OFolder.FolderChangeResult]
def addMember( def addMember(
folder: Ident, folder: Ident,
account: AccountId, userId: Ident,
member: Ident member: Ident
): F[OFolder.FolderChangeResult] ): F[OFolder.FolderChangeResult]
def removeMember( def removeMember(
folder: Ident, folder: Ident,
account: AccountId, userId: Ident,
member: Ident member: Ident
): F[OFolder.FolderChangeResult] ): F[OFolder.FolderChangeResult]
def delete(id: Ident, account: AccountId): F[OFolder.FolderChangeResult] def delete(id: Ident, userId: Ident): F[OFolder.FolderChangeResult]
} }
object OFolder { object OFolder {
@ -94,23 +100,35 @@ object OFolder {
def apply[F[_]](store: Store[F]): Resource[F, OFolder[F]] = def apply[F[_]](store: Store[F]): Resource[F, OFolder[F]] =
Resource.pure[F, OFolder[F]](new OFolder[F] { Resource.pure[F, OFolder[F]](new OFolder[F] {
def findAll( def findAll(
account: AccountId, collectiveId: CollectiveId,
userId: Ident,
ownerLogin: Option[Ident], ownerLogin: Option[Ident],
query: Option[String], query: Option[String],
order: FolderOrder order: FolderOrder
): F[Vector[FolderItem]] = ): F[Vector[FolderItem]] =
store.transact( store.transact(
QFolder.findAll(account, None, ownerLogin, query, FolderOrder(order)) QFolder.findAll(
collectiveId,
userId,
None,
ownerLogin,
query,
FolderOrder(order)
)
) )
def findById(id: Ident, account: AccountId): F[Option[FolderDetail]] = def findById(
store.transact(QFolder.findById(id, account)) id: Ident,
collectiveId: CollectiveId,
userId: Ident
): F[Option[FolderDetail]] =
store.transact(QFolder.findById(id, collectiveId, userId))
def add(folder: RFolder, login: Option[Ident]): F[AddResult] = { def add(folder: RFolder, login: Option[Ident]): F[AddResult] = {
val insert = login match { val insert = login match {
case Some(n) => case Some(userLogin) =>
for { for {
user <- RUser.findByAccount(AccountId(folder.collectiveId, n)) user <- RUser.findByLogin(userLogin, folder.collectiveId.some)
s = user.map(u => folder.copy(owner = u.uid)).getOrElse(folder) s = user.map(u => folder.copy(owner = u.uid)).getOrElse(folder)
n <- RFolder.insert(s) n <- RFolder.insert(s)
} yield n } yield n
@ -124,26 +142,26 @@ object OFolder {
def changeName( def changeName(
folder: Ident, folder: Ident,
account: AccountId, userId: Ident,
name: String name: String
): F[FolderChangeResult] = ): F[FolderChangeResult] =
store.transact(QFolder.changeName(folder, account, name)) store.transact(QFolder.changeName(folder, userId, name))
def addMember( def addMember(
folder: Ident, folder: Ident,
account: AccountId, userId: Ident,
member: Ident member: Ident
): F[FolderChangeResult] = ): F[FolderChangeResult] =
store.transact(QFolder.addMember(folder, account, member)) store.transact(QFolder.addMember(folder, userId, member))
def removeMember( def removeMember(
folder: Ident, folder: Ident,
account: AccountId, userId: Ident,
member: Ident member: Ident
): F[FolderChangeResult] = ): F[FolderChangeResult] =
store.transact(QFolder.removeMember(folder, account, member)) store.transact(QFolder.removeMember(folder, userId, member))
def delete(id: Ident, account: AccountId): F[FolderChangeResult] = def delete(id: Ident, userId: Ident): F[FolderChangeResult] =
store.transact(QFolder.delete(id, account)) store.transact(QFolder.delete(id, userId))
}) })
} }
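
A minimal sketch of listing folders with the split collective/user arguments that replace the previous AccountId. The helper name is illustrative; the findAll signature is taken from this diff.

import docspell.common.{CollectiveId, Ident}

// Illustrative only: folders visible to a user, with no owner or name filter.
def visibleFolders[F[_]](
    folders: OFolder[F],
    collectiveId: CollectiveId,
    userId: Ident,
    order: OFolder.FolderOrder
): F[Vector[OFolder.FolderItem]] =
  folders.findAll(collectiveId, userId, None, None, order)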

View File

@ -23,7 +23,7 @@ trait OFulltext[F[_]] {
/** Clears the full-text index for the given collective and starts a task indexing all /** Clears the full-text index for the given collective and starts a task indexing all
* their data. * their data.
*/ */
def reindexCollective(account: AccountId): F[Unit] def reindexCollective(cid: CollectiveId, submitterUserId: Option[Ident]): F[Unit]
} }
object OFulltext { object OFulltext {
@ -40,13 +40,13 @@ object OFulltext {
_ <- jobStore.insertIfNew(job.encode) _ <- jobStore.insertIfNew(job.encode)
} yield () } yield ()
def reindexCollective(account: AccountId): F[Unit] = def reindexCollective(cid: CollectiveId, submitterUserId: Option[Ident]): F[Unit] =
for { for {
_ <- logger.debug(s"Re-index collective: $account") _ <- logger.debug(s"Re-index collective: $cid")
exist <- store.transact( exist <- store.transact(
RJob.findNonFinalByTracker(DocspellSystem.migrationTaskTracker) RJob.findNonFinalByTracker(DocspellSystem.migrationTaskTracker)
) )
job <- JobFactory.reIndex(account) job <- JobFactory.reIndex(cid, submitterUserId)
_ <- _ <-
if (exist.isDefined) ().pure[F] if (exist.isDefined) ().pure[F]
else jobStore.insertIfNew(job.encode) else jobStore.insertIfNew(job.encode)
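
A sketch of triggering a collective re-index with the new signature, where the submitting user is optional and only used to attribute the background job. The wrapper name is illustrative.

import docspell.common.{CollectiveId, Ident}

// Illustrative only: schedule a full-text re-index for one collective.
def reindex[F[_]](fts: OFulltext[F], cid: CollectiveId, userId: Option[Ident]): F[Unit] =
  fts.reindexCollective(cid, userId)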

View File

@ -19,6 +19,7 @@ import docspell.ftsclient.FtsClient
import docspell.logging.Logger import docspell.logging.Logger
import docspell.notification.api.Event import docspell.notification.api.Event
import docspell.scheduler.JobStore import docspell.scheduler.JobStore
import docspell.scheduler.usertask.UserTaskScope
import docspell.store.queries.{QAttachment, QItem, QMoveAttachment} import docspell.store.queries.{QAttachment, QItem, QMoveAttachment}
import docspell.store.records._ import docspell.store.records._
import docspell.store.{AddResult, Store, UpdateResult} import docspell.store.{AddResult, Store, UpdateResult}
@ -31,7 +32,7 @@ trait OItem[F[_]] {
def setTags( def setTags(
item: Ident, item: Ident,
tagIds: List[String], tagIds: List[String],
collective: Ident collective: CollectiveId
): F[AttachedEvent[UpdateResult]] ): F[AttachedEvent[UpdateResult]]
/** Sets tags for multiple items. The tags of the items will be replaced with the given /** Sets tags for multiple items. The tags of the items will be replaced with the given
@ -40,11 +41,15 @@ trait OItem[F[_]] {
def setTagsMultipleItems( def setTagsMultipleItems(
items: Nel[Ident], items: Nel[Ident],
tags: List[String], tags: List[String],
collective: Ident collective: CollectiveId
): F[AttachedEvent[UpdateResult]] ): F[AttachedEvent[UpdateResult]]
/** Create a new tag and add it to the item. */ /** Create a new tag and add it to the item. */
def addNewTag(collective: Ident, item: Ident, tag: RTag): F[AttachedEvent[AddResult]] def addNewTag(
collective: CollectiveId,
item: Ident,
tag: RTag
): F[AttachedEvent[AddResult]]
/** Apply all tags to the given item. Tags must exist, but can be IDs or names. Existing /** Apply all tags to the given item. Tags must exist, but can be IDs or names. Existing
* tags on the item are left unchanged. * tags on the item are left unchanged.
@ -52,54 +57,58 @@ trait OItem[F[_]] {
def linkTags( def linkTags(
item: Ident, item: Ident,
tags: List[String], tags: List[String],
collective: Ident collective: CollectiveId
): F[AttachedEvent[UpdateResult]] ): F[AttachedEvent[UpdateResult]]
def linkTagsMultipleItems( def linkTagsMultipleItems(
items: Nel[Ident], items: Nel[Ident],
tags: List[String], tags: List[String],
collective: Ident collective: CollectiveId
): F[AttachedEvent[UpdateResult]] ): F[AttachedEvent[UpdateResult]]
def removeTagsOfCategories( def removeTagsOfCategories(
item: Ident, item: Ident,
collective: Ident, collective: CollectiveId,
categories: Set[String] categories: Set[String]
): F[AttachedEvent[UpdateResult]] ): F[AttachedEvent[UpdateResult]]
def removeTagsMultipleItems( def removeTagsMultipleItems(
items: Nel[Ident], items: Nel[Ident],
tags: List[String], tags: List[String],
collective: Ident collective: CollectiveId
): F[AttachedEvent[UpdateResult]] ): F[AttachedEvent[UpdateResult]]
/** Toggles tags of the given item. Tags must exist, but can be IDs or names. */ /** Toggles tags of the given item. Tags must exist, but can be IDs or names. */
def toggleTags( def toggleTags(
item: Ident, item: Ident,
tags: List[String], tags: List[String],
collective: Ident collective: CollectiveId
): F[AttachedEvent[UpdateResult]] ): F[AttachedEvent[UpdateResult]]
def setDirection( def setDirection(
item: Nel[Ident], item: Nel[Ident],
direction: Direction, direction: Direction,
collective: Ident collective: CollectiveId
): F[UpdateResult] ): F[UpdateResult]
/** Set or remove the folder on an item. Folder can be the id or name. */ /** Set or remove the folder on an item. Folder can be the id or name. */
def setFolder(item: Ident, folder: Option[String], collective: Ident): F[UpdateResult] def setFolder(
item: Ident,
folder: Option[String],
collective: CollectiveId
): F[UpdateResult]
/** Set or remove the folder on multiple items. Folder can be the id or name. */ /** Set or remove the folder on multiple items. Folder can be the id or name. */
def setFolderMultiple( def setFolderMultiple(
items: Nel[Ident], items: Nel[Ident],
folder: Option[String], folder: Option[String],
collective: Ident collective: CollectiveId
): F[UpdateResult] ): F[UpdateResult]
def setCorrOrg( def setCorrOrg(
items: Nel[Ident], items: Nel[Ident],
org: Option[Ident], org: Option[Ident],
collective: Ident collective: CollectiveId
): F[UpdateResult] ): F[UpdateResult]
def addCorrOrg(item: Ident, org: OOrganization.OrgAndContacts): F[AddResult] def addCorrOrg(item: Ident, org: OOrganization.OrgAndContacts): F[AddResult]
@ -107,7 +116,7 @@ trait OItem[F[_]] {
def setCorrPerson( def setCorrPerson(
items: Nel[Ident], items: Nel[Ident],
person: Option[Ident], person: Option[Ident],
collective: Ident collective: CollectiveId
): F[UpdateResult] ): F[UpdateResult]
def addCorrPerson(item: Ident, person: OOrganization.PersonAndContacts): F[AddResult] def addCorrPerson(item: Ident, person: OOrganization.PersonAndContacts): F[AddResult]
@ -115,7 +124,7 @@ trait OItem[F[_]] {
def setConcPerson( def setConcPerson(
items: Nel[Ident], items: Nel[Ident],
person: Option[Ident], person: Option[Ident],
collective: Ident collective: CollectiveId
): F[UpdateResult] ): F[UpdateResult]
def addConcPerson(item: Ident, person: OOrganization.PersonAndContacts): F[AddResult] def addConcPerson(item: Ident, person: OOrganization.PersonAndContacts): F[AddResult]
@ -123,64 +132,68 @@ trait OItem[F[_]] {
def setConcEquip( def setConcEquip(
items: Nel[Ident], items: Nel[Ident],
equip: Option[Ident], equip: Option[Ident],
collective: Ident collective: CollectiveId
): F[UpdateResult] ): F[UpdateResult]
def addConcEquip(item: Ident, equip: REquipment): F[AddResult] def addConcEquip(item: Ident, equip: REquipment): F[AddResult]
def setNotes(item: Ident, notes: Option[String], collective: Ident): F[UpdateResult] def setNotes(
item: Ident,
notes: Option[String],
collective: CollectiveId
): F[UpdateResult]
def addNotes( def addNotes(
item: Ident, item: Ident,
notes: String, notes: String,
separator: Option[String], separator: Option[String],
collective: Ident collective: CollectiveId
): F[UpdateResult] ): F[UpdateResult]
def setName(item: Ident, name: String, collective: Ident): F[UpdateResult] def setName(item: Ident, name: String, collective: CollectiveId): F[UpdateResult]
def setNameMultiple( def setNameMultiple(
items: Nel[Ident], items: Nel[Ident],
name: String, name: String,
collective: Ident collective: CollectiveId
): F[UpdateResult] ): F[UpdateResult]
def setState(item: Ident, state: ItemState, collective: Ident): F[AddResult] = def setState(item: Ident, state: ItemState, collective: CollectiveId): F[AddResult] =
setStates(Nel.of(item), state, collective) setStates(Nel.of(item), state, collective)
def setStates( def setStates(
item: Nel[Ident], item: Nel[Ident],
state: ItemState, state: ItemState,
collective: Ident collective: CollectiveId
): F[AddResult] ): F[AddResult]
def restore(items: Nel[Ident], collective: Ident): F[UpdateResult] def restore(items: Nel[Ident], collective: CollectiveId): F[UpdateResult]
def setItemDate( def setItemDate(
item: Nel[Ident], item: Nel[Ident],
date: Option[Timestamp], date: Option[Timestamp],
collective: Ident collective: CollectiveId
): F[UpdateResult] ): F[UpdateResult]
def setItemDueDate( def setItemDueDate(
item: Nel[Ident], item: Nel[Ident],
date: Option[Timestamp], date: Option[Timestamp],
collective: Ident collective: CollectiveId
): F[UpdateResult] ): F[UpdateResult]
def getProposals(item: Ident, collective: Ident): F[MetaProposalList] def getProposals(item: Ident, collective: CollectiveId): F[MetaProposalList]
def deleteItem(itemId: Ident, collective: Ident): F[Int] def deleteItem(itemId: Ident, collective: CollectiveId): F[Int]
def deleteItemMultiple(items: Nel[Ident], collective: Ident): F[Int] def deleteItemMultiple(items: Nel[Ident], collective: CollectiveId): F[Int]
def deleteAttachment(id: Ident, collective: Ident): F[Int] def deleteAttachment(id: Ident, collective: CollectiveId): F[Int]
def setDeletedState(items: Nel[Ident], collective: Ident): F[Int] def setDeletedState(items: Nel[Ident], collective: CollectiveId): F[Int]
def deleteAttachmentMultiple( def deleteAttachmentMultiple(
attachments: Nel[Ident], attachments: Nel[Ident],
collective: Ident collective: CollectiveId
): F[Int] ): F[Int]
def moveAttachmentBefore(itemId: Ident, source: Ident, target: Ident): F[AddResult] def moveAttachmentBefore(itemId: Ident, source: Ident, target: Ident): F[AddResult]
@ -188,7 +201,7 @@ trait OItem[F[_]] {
def setAttachmentName( def setAttachmentName(
attachId: Ident, attachId: Ident,
name: Option[String], name: Option[String],
collective: Ident collective: CollectiveId
): F[UpdateResult] ): F[UpdateResult]
/** Submits the item for re-processing. The list of attachment ids can be used to only /** Submits the item for re-processing. The list of attachment ids can be used to only
@ -196,28 +209,30 @@ trait OItem[F[_]] {
* attachments are reprocessed. This call only submits the job into the queue. * attachments are reprocessed. This call only submits the job into the queue.
*/ */
def reprocess( def reprocess(
cid: CollectiveId,
item: Ident, item: Ident,
attachments: List[Ident], attachments: List[Ident],
account: AccountId submitter: UserTaskScope
): F[UpdateResult] ): F[UpdateResult]
def reprocessAll( def reprocessAll(
cid: CollectiveId,
items: Nel[Ident], items: Nel[Ident],
account: AccountId submitter: UserTaskScope
): F[UpdateResult] ): F[UpdateResult]
/** Submits a task that finds all non-converted pdfs and triggers converting them using /** Submits a task that finds all non-converted pdfs and triggers converting them using
* ocrmypdf. Each file is converted by a separate task. * ocrmypdf. Each file is converted by a separate task.
*/ */
def convertAllPdf( def convertAllPdf(
collective: Option[Ident], collective: Option[CollectiveId],
submitter: Option[Ident] submitter: UserTaskScope
): F[UpdateResult] ): F[UpdateResult]
/** Submits a task that (re)generates the preview image for an attachment. */ /** Submits a task that (re)generates the preview image for an attachment. */
def generatePreview( def generatePreview(
args: MakePreviewArgs, args: MakePreviewArgs,
account: AccountId account: UserTaskScope
): F[UpdateResult] ): F[UpdateResult]
/** Submits a task that (re)generates the preview images for all attachments. */ /** Submits a task that (re)generates the preview images for all attachments. */
@ -227,7 +242,7 @@ trait OItem[F[_]] {
def merge( def merge(
logger: Logger[F], logger: Logger[F],
items: Nel[Ident], items: Nel[Ident],
collective: Ident collective: CollectiveId
): F[UpdateResult] ): F[UpdateResult]
} }
@ -248,7 +263,7 @@ object OItem {
def merge( def merge(
logger: Logger[F], logger: Logger[F],
items: Nel[Ident], items: Nel[Ident],
collective: Ident collective: CollectiveId
): F[UpdateResult] = ): F[UpdateResult] =
Merge(logger, store, this, createIndex).merge(items, collective).attempt.map { Merge(logger, store, this, createIndex).merge(items, collective).attempt.map {
case Right(Right(_)) => UpdateResult.success case Right(Right(_)) => UpdateResult.success
@ -269,14 +284,14 @@ object OItem {
def linkTags( def linkTags(
item: Ident, item: Ident,
tags: List[String], tags: List[String],
collective: Ident collective: CollectiveId
): F[AttachedEvent[UpdateResult]] = ): F[AttachedEvent[UpdateResult]] =
linkTagsMultipleItems(Nel.of(item), tags, collective) linkTagsMultipleItems(Nel.of(item), tags, collective)
def linkTagsMultipleItems( def linkTagsMultipleItems(
items: Nel[Ident], items: Nel[Ident],
tags: List[String], tags: List[String],
collective: Ident collective: CollectiveId
): F[AttachedEvent[UpdateResult]] = ): F[AttachedEvent[UpdateResult]] =
tags.distinct match { tags.distinct match {
case Nil => AttachedEvent.only(UpdateResult.success).pure[F] case Nil => AttachedEvent.only(UpdateResult.success).pure[F]
@ -305,7 +320,7 @@ object OItem {
def removeTagsOfCategories( def removeTagsOfCategories(
item: Ident, item: Ident,
collective: Ident, collective: CollectiveId,
categories: Set[String] categories: Set[String]
): F[AttachedEvent[UpdateResult]] = ): F[AttachedEvent[UpdateResult]] =
if (categories.isEmpty) { if (categories.isEmpty) {
@ -328,7 +343,7 @@ object OItem {
def removeTagsMultipleItems( def removeTagsMultipleItems(
items: Nel[Ident], items: Nel[Ident],
tags: List[String], tags: List[String],
collective: Ident collective: CollectiveId
): F[AttachedEvent[UpdateResult]] = ): F[AttachedEvent[UpdateResult]] =
tags.distinct match { tags.distinct match {
case Nil => AttachedEvent.only(UpdateResult.success).pure[F] case Nil => AttachedEvent.only(UpdateResult.success).pure[F]
@ -354,7 +369,7 @@ object OItem {
def toggleTags( def toggleTags(
item: Ident, item: Ident,
tags: List[String], tags: List[String],
collective: Ident collective: CollectiveId
): F[AttachedEvent[UpdateResult]] = ): F[AttachedEvent[UpdateResult]] =
tags.distinct match { tags.distinct match {
case Nil => AttachedEvent.only(UpdateResult.success).pure[F] case Nil => AttachedEvent.only(UpdateResult.success).pure[F]
@ -383,14 +398,14 @@ object OItem {
def setTags( def setTags(
item: Ident, item: Ident,
tagIds: List[String], tagIds: List[String],
collective: Ident collective: CollectiveId
): F[AttachedEvent[UpdateResult]] = ): F[AttachedEvent[UpdateResult]] =
setTagsMultipleItems(Nel.of(item), tagIds, collective) setTagsMultipleItems(Nel.of(item), tagIds, collective)
def setTagsMultipleItems( def setTagsMultipleItems(
items: Nel[Ident], items: Nel[Ident],
tags: List[String], tags: List[String],
collective: Ident collective: CollectiveId
): F[AttachedEvent[UpdateResult]] = { ): F[AttachedEvent[UpdateResult]] = {
val dbTask = val dbTask =
for { for {
@ -411,7 +426,7 @@ object OItem {
} }
def addNewTag( def addNewTag(
collective: Ident, collective: CollectiveId,
item: Ident, item: Ident,
tag: RTag tag: RTag
): F[AttachedEvent[AddResult]] = ): F[AttachedEvent[AddResult]] =
@ -448,7 +463,7 @@ object OItem {
def setDirection( def setDirection(
items: Nel[Ident], items: Nel[Ident],
direction: Direction, direction: Direction,
collective: Ident collective: CollectiveId
): F[UpdateResult] = ): F[UpdateResult] =
UpdateResult.fromUpdate( UpdateResult.fromUpdate(
store store
@ -458,7 +473,7 @@ object OItem {
def setFolder( def setFolder(
item: Ident, item: Ident,
folder: Option[String], folder: Option[String],
collective: Ident collective: CollectiveId
): F[UpdateResult] = ): F[UpdateResult] =
for { for {
result <- store.transact(RItem.updateFolder(item, collective, folder)).attempt result <- store.transact(RItem.updateFolder(item, collective, folder)).attempt
@ -478,7 +493,7 @@ object OItem {
def setFolderMultiple( def setFolderMultiple(
items: Nel[Ident], items: Nel[Ident],
folder: Option[String], folder: Option[String],
collective: Ident collective: CollectiveId
): F[UpdateResult] = ): F[UpdateResult] =
for { for {
results <- items.traverse(i => setFolder(i, folder, collective)) results <- items.traverse(i => setFolder(i, folder, collective))
@ -499,7 +514,7 @@ object OItem {
def setCorrOrg( def setCorrOrg(
items: Nel[Ident], items: Nel[Ident],
org: Option[Ident], org: Option[Ident],
collective: Ident collective: CollectiveId
): F[UpdateResult] = ): F[UpdateResult] =
UpdateResult.fromUpdate( UpdateResult.fromUpdate(
store store
@ -533,7 +548,7 @@ object OItem {
def setCorrPerson( def setCorrPerson(
items: Nel[Ident], items: Nel[Ident],
person: Option[Ident], person: Option[Ident],
collective: Ident collective: CollectiveId
): F[UpdateResult] = ): F[UpdateResult] =
UpdateResult.fromUpdate( UpdateResult.fromUpdate(
store store
@ -571,7 +586,7 @@ object OItem {
def setConcPerson( def setConcPerson(
items: Nel[Ident], items: Nel[Ident],
person: Option[Ident], person: Option[Ident],
collective: Ident collective: CollectiveId
): F[UpdateResult] = ): F[UpdateResult] =
UpdateResult.fromUpdate( UpdateResult.fromUpdate(
store store
@ -609,7 +624,7 @@ object OItem {
def setConcEquip( def setConcEquip(
items: Nel[Ident], items: Nel[Ident],
equip: Option[Ident], equip: Option[Ident],
collective: Ident collective: CollectiveId
): F[UpdateResult] = ): F[UpdateResult] =
UpdateResult.fromUpdate( UpdateResult.fromUpdate(
store store
@ -640,7 +655,7 @@ object OItem {
def setNotes( def setNotes(
item: Ident, item: Ident,
notes: Option[String], notes: Option[String],
collective: Ident collective: CollectiveId
): F[UpdateResult] = ): F[UpdateResult] =
UpdateResult UpdateResult
.fromUpdate( .fromUpdate(
@ -662,7 +677,7 @@ object OItem {
item: Ident, item: Ident,
notes: String, notes: String,
separator: Option[String], separator: Option[String],
collective: Ident collective: CollectiveId
): F[UpdateResult] = ): F[UpdateResult] =
store store
.transact(RItem.appendNotes(item, collective, notes, separator)) .transact(RItem.appendNotes(item, collective, notes, separator))
@ -685,7 +700,11 @@ object OItem {
UpdateResult.notFound.pure[F] UpdateResult.notFound.pure[F]
} }
def setName(item: Ident, name: String, collective: Ident): F[UpdateResult] = def setName(
item: Ident,
name: String,
collective: CollectiveId
): F[UpdateResult] =
UpdateResult UpdateResult
.fromUpdate( .fromUpdate(
store store
@ -705,7 +724,7 @@ object OItem {
def setNameMultiple( def setNameMultiple(
items: Nel[Ident], items: Nel[Ident],
name: String, name: String,
collective: Ident collective: CollectiveId
): F[UpdateResult] = ): F[UpdateResult] =
for { for {
results <- items.traverse(i => setName(i, name, collective)) results <- items.traverse(i => setName(i, name, collective))
@ -726,7 +745,7 @@ object OItem {
def setStates( def setStates(
items: Nel[Ident], items: Nel[Ident],
state: ItemState, state: ItemState,
collective: Ident collective: CollectiveId
): F[AddResult] = ): F[AddResult] =
store store
.transact(RItem.updateStateForCollective(items, state, collective)) .transact(RItem.updateStateForCollective(items, state, collective))
@ -735,7 +754,7 @@ object OItem {
def restore( def restore(
items: Nel[Ident], items: Nel[Ident],
collective: Ident collective: CollectiveId
): F[UpdateResult] = ): F[UpdateResult] =
UpdateResult.fromUpdate(for { UpdateResult.fromUpdate(for {
n <- store n <- store
@ -748,7 +767,7 @@ object OItem {
def setItemDate( def setItemDate(
items: Nel[Ident], items: Nel[Ident],
date: Option[Timestamp], date: Option[Timestamp],
collective: Ident collective: CollectiveId
): F[UpdateResult] = ): F[UpdateResult] =
UpdateResult.fromUpdate( UpdateResult.fromUpdate(
store store
@ -758,42 +777,42 @@ object OItem {
def setItemDueDate( def setItemDueDate(
items: Nel[Ident], items: Nel[Ident],
date: Option[Timestamp], date: Option[Timestamp],
collective: Ident collective: CollectiveId
): F[UpdateResult] = ): F[UpdateResult] =
UpdateResult.fromUpdate( UpdateResult.fromUpdate(
store store
.transact(RItem.updateDueDate(items, collective, date)) .transact(RItem.updateDueDate(items, collective, date))
) )
def deleteItem(itemId: Ident, collective: Ident): F[Int] = def deleteItem(itemId: Ident, collective: CollectiveId): F[Int] =
QItem QItem
.delete(store)(itemId, collective) .delete(store)(itemId, collective)
.flatTap(_ => fts.removeItem(logger, itemId)) .flatTap(_ => fts.removeItem(logger, itemId))
def deleteItemMultiple(items: Nel[Ident], collective: Ident): F[Int] = def deleteItemMultiple(items: Nel[Ident], collective: CollectiveId): F[Int] =
for { for {
itemIds <- store.transact(RItem.filterItems(items, collective)) itemIds <- store.transact(RItem.filterItems(items, collective))
results <- itemIds.traverse(item => deleteItem(item, collective)) results <- itemIds.traverse(item => deleteItem(item, collective))
n = results.sum n = results.sum
} yield n } yield n
def setDeletedState(items: Nel[Ident], collective: Ident): F[Int] = def setDeletedState(items: Nel[Ident], collective: CollectiveId): F[Int] =
for { for {
n <- store.transact(RItem.setState(items, collective, ItemState.Deleted)) n <- store.transact(RItem.setState(items, collective, ItemState.Deleted))
_ <- items.traverse(id => fts.removeItem(logger, id)) _ <- items.traverse(id => fts.removeItem(logger, id))
} yield n } yield n
def getProposals(item: Ident, collective: Ident): F[MetaProposalList] = def getProposals(item: Ident, collective: CollectiveId): F[MetaProposalList] =
store.transact(QAttachment.getMetaProposals(item, collective)) store.transact(QAttachment.getMetaProposals(item, collective))
def deleteAttachment(id: Ident, collective: Ident): F[Int] = def deleteAttachment(id: Ident, collective: CollectiveId): F[Int] =
QAttachment QAttachment
.deleteSingleAttachment(store)(id, collective) .deleteSingleAttachment(store)(id, collective)
.flatTap(_ => fts.removeAttachment(logger, id)) .flatTap(_ => fts.removeAttachment(logger, id))
def deleteAttachmentMultiple( def deleteAttachmentMultiple(
attachments: Nel[Ident], attachments: Nel[Ident],
collective: Ident collective: CollectiveId
): F[Int] = ): F[Int] =
for { for {
attachmentIds <- store.transact( attachmentIds <- store.transact(
@ -808,7 +827,7 @@ object OItem {
def setAttachmentName( def setAttachmentName(
attachId: Ident, attachId: Ident,
name: Option[String], name: Option[String],
collective: Ident collective: CollectiveId
): F[UpdateResult] = ): F[UpdateResult] =
UpdateResult UpdateResult
.fromUpdate( .fromUpdate(
@ -833,49 +852,52 @@ object OItem {
) )
def reprocess( def reprocess(
cid: CollectiveId,
item: Ident, item: Ident,
attachments: List[Ident], attachments: List[Ident],
account: AccountId submitter: UserTaskScope
): F[UpdateResult] = ): F[UpdateResult] =
(for { (for {
_ <- OptionT( _ <- OptionT(
store.transact(RItem.findByIdAndCollective(item, account.collective)) store.transact(RItem.findByIdAndCollective(item, cid))
) )
args = ReProcessItemArgs(item, attachments) args = ReProcessItemArgs(item, attachments)
job <- OptionT.liftF( job <- OptionT.liftF(
JobFactory.reprocessItem[F](args, account, Priority.Low) JobFactory.reprocessItem[F](args, submitter, Priority.Low)
) )
_ <- OptionT.liftF(jobStore.insertIfNew(job.encode)) _ <- OptionT.liftF(jobStore.insertIfNew(job.encode))
} yield UpdateResult.success).getOrElse(UpdateResult.notFound) } yield UpdateResult.success).getOrElse(UpdateResult.notFound)
def reprocessAll( def reprocessAll(
cid: CollectiveId,
items: Nel[Ident], items: Nel[Ident],
account: AccountId submitter: UserTaskScope
): F[UpdateResult] = ): F[UpdateResult] =
UpdateResult.fromUpdate(for { UpdateResult.fromUpdate(for {
items <- store.transact(RItem.filterItems(items, account.collective)) items <- store.transact(RItem.filterItems(items, cid))
jobs <- items jobs <- items
.map(item => ReProcessItemArgs(item, Nil)) .map(item => ReProcessItemArgs(item, Nil))
.traverse(arg => JobFactory.reprocessItem[F](arg, account, Priority.Low)) .traverse(arg => JobFactory.reprocessItem[F](arg, submitter, Priority.Low))
.map(_.map(_.encode)) .map(_.map(_.encode))
_ <- jobStore.insertAllIfNew(jobs) _ <- jobStore.insertAllIfNew(jobs)
} yield items.size) } yield items.size)
def convertAllPdf( def convertAllPdf(
collective: Option[Ident], collective: Option[CollectiveId],
submitter: Option[Ident] submitter: UserTaskScope
): F[UpdateResult] = ): F[UpdateResult] =
for { for {
job <- JobFactory.convertAllPdfs[F](collective, submitter, Priority.Low) job <- JobFactory
.convertAllPdfs[F](ConvertAllPdfArgs(collective), submitter, Priority.Low)
_ <- jobStore.insertIfNew(job.encode) _ <- jobStore.insertIfNew(job.encode)
} yield UpdateResult.success } yield UpdateResult.success
def generatePreview( def generatePreview(
args: MakePreviewArgs, args: MakePreviewArgs,
account: AccountId submitter: UserTaskScope
): F[UpdateResult] = ): F[UpdateResult] =
for { for {
job <- JobFactory.makePreview[F](args, account.some) job <- JobFactory.makePreview[F](args, submitter)
_ <- jobStore.insertIfNew(job.encode) _ <- jobStore.insertIfNew(job.encode)
} yield UpdateResult.success } yield UpdateResult.success
@ -883,7 +905,8 @@ object OItem {
storeMode: MakePreviewArgs.StoreMode storeMode: MakePreviewArgs.StoreMode
): F[UpdateResult] = ): F[UpdateResult] =
for { for {
job <- JobFactory.allPreviews[F](AllPreviewsArgs(None, storeMode), None) job <- JobFactory
.allPreviews[F](AllPreviewsArgs(None, storeMode), UserTaskScope.system)
_ <- jobStore.insertIfNew(job.encode) _ <- jobStore.insertIfNew(job.encode)
} yield UpdateResult.success } yield UpdateResult.success
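
A minimal sketch of re-processing with the new OItem signature: the collective id is passed explicitly and the submitter is a UserTaskScope. The helper name is illustrative, and the empty attachment list is assumed to mean "all attachments" (the middle of the doc comment sits outside the shown hunk).

import docspell.common.{CollectiveId, Ident}
import docspell.scheduler.usertask.UserTaskScope
import docspell.store.UpdateResult

// Illustrative only: re-run processing for one item and its attachments.
def reprocessSingle[F[_]](
    items: OItem[F],
    cid: CollectiveId,
    itemId: Ident,
    submitter: UserTaskScope
): F[UpdateResult] =
  items.reprocess(cid, itemId, Nil, submitter)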

View File

@ -12,7 +12,7 @@ import cats.implicits._
import docspell.backend.ops.OItemLink.LinkResult import docspell.backend.ops.OItemLink.LinkResult
import docspell.backend.ops.search.OSearch import docspell.backend.ops.search.OSearch
import docspell.common.{AccountId, Ident} import docspell.common._
import docspell.query.ItemQuery import docspell.query.ItemQuery
import docspell.query.ItemQueryDsl._ import docspell.query.ItemQueryDsl._
import docspell.store.qb.Batch import docspell.store.qb.Batch
@ -22,12 +22,16 @@ import docspell.store.{AddResult, Store}
trait OItemLink[F[_]] { trait OItemLink[F[_]] {
def addAll(cid: Ident, target: Ident, related: NonEmptyList[Ident]): F[LinkResult] def addAll(
cid: CollectiveId,
target: Ident,
related: NonEmptyList[Ident]
): F[LinkResult]
def removeAll(cid: Ident, target: Ident, related: NonEmptyList[Ident]): F[Unit] def removeAll(cid: CollectiveId, target: Ident, related: NonEmptyList[Ident]): F[Unit]
def getRelated( def getRelated(
account: AccountId, account: AccountInfo,
item: Ident, item: Ident,
batch: Batch batch: Batch
): F[Vector[ListItemWithTags]] ): F[Vector[ListItemWithTags]]
@ -48,12 +52,12 @@ object OItemLink {
def apply[F[_]: Sync](store: Store[F], search: OSearch[F]): OItemLink[F] = def apply[F[_]: Sync](store: Store[F], search: OSearch[F]): OItemLink[F] =
new OItemLink[F] { new OItemLink[F] {
def getRelated( def getRelated(
accountId: AccountId, accountId: AccountInfo,
item: Ident, item: Ident,
batch: Batch batch: Batch
): F[Vector[ListItemWithTags]] = ): F[Vector[ListItemWithTags]] =
store store
.transact(RItemLink.findLinked(accountId.collective, item)) .transact(RItemLink.findLinked(accountId.collectiveId, item))
.map(ids => NonEmptyList.fromList(ids.toList)) .map(ids => NonEmptyList.fromList(ids.toList))
.flatMap { .flatMap {
case Some(nel) => case Some(nel) =>
@ -69,14 +73,22 @@ object OItemLink {
Vector.empty[ListItemWithTags].pure[F] Vector.empty[ListItemWithTags].pure[F]
} }
def addAll(cid: Ident, target: Ident, related: NonEmptyList[Ident]): F[LinkResult] = def addAll(
cid: CollectiveId,
target: Ident,
related: NonEmptyList[Ident]
): F[LinkResult] =
if (related.contains_(target)) LinkResult.linkTargetItemError.pure[F] if (related.contains_(target)) LinkResult.linkTargetItemError.pure[F]
else related.traverse(addSingle(cid, target, _)).as(LinkResult.Success) else related.traverse(addSingle(cid, target, _)).as(LinkResult.Success)
def removeAll(cid: Ident, target: Ident, related: NonEmptyList[Ident]): F[Unit] = def removeAll(
cid: CollectiveId,
target: Ident,
related: NonEmptyList[Ident]
): F[Unit] =
store.transact(RItemLink.deleteAll(cid, target, related)).void store.transact(RItemLink.deleteAll(cid, target, related)).void
def addSingle(cid: Ident, target: Ident, related: Ident): F[Unit] = { def addSingle(cid: CollectiveId, target: Ident, related: Ident): F[Unit] = {
val exists = RItemLink.exists(cid, target, related) val exists = RItemLink.exists(cid, target, related)
val insert = RItemLink.insertNew(cid, target, related) val insert = RItemLink.insertNew(cid, target, related)
store.add(insert, exists).flatMap { store.add(insert, exists).flatMap {
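A small hypothetical sketch of the new link API, assuming a wired OItemLink[IO] and caller-provided identifiers:

import cats.data.NonEmptyList
import cats.effect.IO
import docspell.backend.ops.OItemLink
import docspell.common._

// Replace the links of `target` within one collective; addAll still refuses
// to link an item to itself and reports that via LinkResult.
def relink(
    ops: OItemLink[IO],          // assumed: provided by the backend wiring
    cid: CollectiveId,
    target: Ident,
    related: NonEmptyList[Ident]
): IO[OItemLink.LinkResult] =
  for {
    _   <- ops.removeAll(cid, target, related)
    res <- ops.addAll(cid, target, related)
  } yield res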

View File

@ -21,32 +21,39 @@ import docspell.store.records._
import doobie.implicits._ import doobie.implicits._
trait OItemSearch[F[_]] { trait OItemSearch[F[_]] {
def findItem(id: Ident, collective: Ident): F[Option[ItemData]] def findItem(id: Ident, collective: CollectiveId): F[Option[ItemData]]
def findDeleted(collective: Ident, maxUpdate: Timestamp, limit: Int): F[Vector[RItem]] def findDeleted(
collective: CollectiveId,
maxUpdate: Timestamp,
limit: Int
): F[Vector[RItem]]
def findAttachment(id: Ident, collective: Ident): F[Option[AttachmentData[F]]] def findAttachment(id: Ident, collective: CollectiveId): F[Option[AttachmentData[F]]]
def findAttachmentSource( def findAttachmentSource(
id: Ident, id: Ident,
collective: Ident collective: CollectiveId
): F[Option[AttachmentSourceData[F]]] ): F[Option[AttachmentSourceData[F]]]
def findAttachmentArchive( def findAttachmentArchive(
id: Ident, id: Ident,
collective: Ident collective: CollectiveId
): F[Option[AttachmentArchiveData[F]]] ): F[Option[AttachmentArchiveData[F]]]
def findAttachmentPreview( def findAttachmentPreview(
id: Ident, id: Ident,
collective: Ident collective: CollectiveId
): F[Option[AttachmentPreviewData[F]]] ): F[Option[AttachmentPreviewData[F]]]
def findItemPreview(item: Ident, collective: Ident): F[Option[AttachmentPreviewData[F]]] def findItemPreview(
item: Ident,
collective: CollectiveId
): F[Option[AttachmentPreviewData[F]]]
def findAttachmentMeta(id: Ident, collective: Ident): F[Option[RAttachmentMeta]] def findAttachmentMeta(id: Ident, collective: CollectiveId): F[Option[RAttachmentMeta]]
def findByFileCollective(checksum: String, collective: Ident): F[Vector[RItem]] def findByFileCollective(checksum: String, collective: CollectiveId): F[Vector[RItem]]
def findByFileSource(checksum: String, sourceId: Ident): F[Option[Vector[RItem]]] def findByFileSource(checksum: String, sourceId: Ident): F[Option[Vector[RItem]]]
@ -114,12 +121,12 @@ object OItemSearch {
def apply[F[_]: Async](store: Store[F]): Resource[F, OItemSearch[F]] = def apply[F[_]: Async](store: Store[F]): Resource[F, OItemSearch[F]] =
Resource.pure[F, OItemSearch[F]](new OItemSearch[F] { Resource.pure[F, OItemSearch[F]](new OItemSearch[F] {
def findItem(id: Ident, collective: Ident): F[Option[ItemData]] = def findItem(id: Ident, collective: CollectiveId): F[Option[ItemData]] =
store store
.transact(QItem.findItem(id, collective)) .transact(QItem.findItem(id, collective))
def findDeleted( def findDeleted(
collective: Ident, collective: CollectiveId,
maxUpdate: Timestamp, maxUpdate: Timestamp,
limit: Int limit: Int
): F[Vector[RItem]] = ): F[Vector[RItem]] =
@ -129,7 +136,10 @@ object OItemSearch {
.compile .compile
.toVector .toVector
def findAttachment(id: Ident, collective: Ident): F[Option[AttachmentData[F]]] = def findAttachment(
id: Ident,
collective: CollectiveId
): F[Option[AttachmentData[F]]] =
store store
.transact(RAttachment.findByIdAndCollective(id, collective)) .transact(RAttachment.findByIdAndCollective(id, collective))
.flatMap { .flatMap {
@ -148,7 +158,7 @@ object OItemSearch {
def findAttachmentSource( def findAttachmentSource(
id: Ident, id: Ident,
collective: Ident collective: CollectiveId
): F[Option[AttachmentSourceData[F]]] = ): F[Option[AttachmentSourceData[F]]] =
store store
.transact(RAttachmentSource.findByIdAndCollective(id, collective)) .transact(RAttachmentSource.findByIdAndCollective(id, collective))
@ -168,7 +178,7 @@ object OItemSearch {
def findAttachmentPreview( def findAttachmentPreview(
id: Ident, id: Ident,
collective: Ident collective: CollectiveId
): F[Option[AttachmentPreviewData[F]]] = ): F[Option[AttachmentPreviewData[F]]] =
store store
.transact(RAttachmentPreview.findByIdAndCollective(id, collective)) .transact(RAttachmentPreview.findByIdAndCollective(id, collective))
@ -188,7 +198,7 @@ object OItemSearch {
def findItemPreview( def findItemPreview(
item: Ident, item: Ident,
collective: Ident collective: CollectiveId
): F[Option[AttachmentPreviewData[F]]] = ): F[Option[AttachmentPreviewData[F]]] =
store store
.transact(RAttachmentPreview.findByItemAndCollective(item, collective)) .transact(RAttachmentPreview.findByItemAndCollective(item, collective))
@ -208,7 +218,7 @@ object OItemSearch {
def findAttachmentArchive( def findAttachmentArchive(
id: Ident, id: Ident,
collective: Ident collective: CollectiveId
): F[Option[AttachmentArchiveData[F]]] = ): F[Option[AttachmentArchiveData[F]]] =
store store
.transact(RAttachmentArchive.findByIdAndCollective(id, collective)) .transact(RAttachmentArchive.findByIdAndCollective(id, collective))
@ -234,15 +244,21 @@ object OItemSearch {
.map(fm => f(fm)) .map(fm => f(fm))
.value .value
def findAttachmentMeta(id: Ident, collective: Ident): F[Option[RAttachmentMeta]] = def findAttachmentMeta(
id: Ident,
collective: CollectiveId
): F[Option[RAttachmentMeta]] =
store.transact(QAttachment.getAttachmentMeta(id, collective)) store.transact(QAttachment.getAttachmentMeta(id, collective))
def findByFileCollective(checksum: String, collective: Ident): F[Vector[RItem]] = def findByFileCollective(
checksum: String,
collective: CollectiveId
): F[Vector[RItem]] =
store.transact(QItem.findByChecksum(checksum, collective, Set.empty)) store.transact(QItem.findByChecksum(checksum, collective, Set.empty))
def findByFileSource(checksum: String, sourceId: Ident): F[Option[Vector[RItem]]] = def findByFileSource(checksum: String, sourceId: Ident): F[Option[Vector[RItem]]] =
store.transact((for { store.transact((for {
coll <- OptionT(RSource.findCollective(sourceId)) coll <- OptionT(RSource.findCollectiveId(sourceId))
items <- OptionT.liftF(QItem.findByChecksum(checksum, coll, Set.empty)) items <- OptionT.liftF(QItem.findByChecksum(checksum, coll, Set.empty))
} yield items).value) } yield items).value)
}) })
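As a hedged illustration only: item and attachment lookups are now keyed by CollectiveId, so callers no longer pass the collective Ident. A sketch assuming IO as the effect type:

import cats.effect.IO
import docspell.backend.ops.OItemSearch
import docspell.common._

// Load an item and one of its attachments, both scoped to the collective.
def loadItemWithAttachment(
    search: OItemSearch[IO],     // assumed: provided by the backend wiring
    cid: CollectiveId,
    itemId: Ident,
    attachId: Ident
) =
  for {
    item <- search.findItem(itemId, cid)          // Option[ItemData]
    att  <- search.findAttachment(attachId, cid)  // Option[AttachmentData[IO]]
  } yield (item, att)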

View File

@ -14,6 +14,7 @@ import docspell.backend.ops.OJob.{CollectiveQueueState, JobCancelResult}
import docspell.common._ import docspell.common._
import docspell.pubsub.api.PubSubT import docspell.pubsub.api.PubSubT
import docspell.scheduler.msg.JobDone import docspell.scheduler.msg.JobDone
import docspell.scheduler.usertask.UserTaskScope
import docspell.store.Store import docspell.store.Store
import docspell.store.UpdateResult import docspell.store.UpdateResult
import docspell.store.queries.QJobQueue import docspell.store.queries.QJobQueue
@ -21,13 +22,13 @@ import docspell.store.records.{RJob, RJobLog}
trait OJob[F[_]] { trait OJob[F[_]] {
def queueState(collective: Ident, maxResults: Int): F[CollectiveQueueState] def queueState(collective: UserTaskScope, maxResults: Int): F[CollectiveQueueState]
def cancelJob(id: Ident, collective: Ident): F[JobCancelResult] def cancelJob(id: Ident, collective: UserTaskScope): F[JobCancelResult]
def setPriority(id: Ident, collective: Ident, prio: Priority): F[UpdateResult] def setPriority(id: Ident, collective: UserTaskScope, prio: Priority): F[UpdateResult]
def getUnfinishedJobCount(collective: Ident): F[Int] def getUnfinishedJobCount(collective: UserTaskScope): F[Int]
} }
object OJob { object OJob {
@ -61,20 +62,34 @@ object OJob {
Resource.pure[F, OJob[F]](new OJob[F] { Resource.pure[F, OJob[F]](new OJob[F] {
private[this] val logger = docspell.logging.getLogger[F] private[this] val logger = docspell.logging.getLogger[F]
def queueState(collective: Ident, maxResults: Int): F[CollectiveQueueState] = private def scopeToGroup(s: UserTaskScope) =
s.collectiveId
.map(_.valueAsIdent)
.getOrElse(DocspellSystem.taskGroup)
def queueState(
collective: UserTaskScope,
maxResults: Int
): F[CollectiveQueueState] =
store store
.transact( .transact(
QJobQueue.queueStateSnapshot(collective, maxResults.toLong) QJobQueue.queueStateSnapshot(scopeToGroup(collective), maxResults.toLong)
) )
.map(t => JobDetail(t._1, t._2)) .map(t => JobDetail(t._1, t._2))
.compile .compile
.toVector .toVector
.map(CollectiveQueueState) .map(CollectiveQueueState)
def setPriority(id: Ident, collective: Ident, prio: Priority): F[UpdateResult] = def setPriority(
UpdateResult.fromUpdate(store.transact(RJob.setPriority(id, collective, prio))) id: Ident,
collective: UserTaskScope,
prio: Priority
): F[UpdateResult] =
UpdateResult.fromUpdate(
store.transact(RJob.setPriority(id, scopeToGroup(collective), prio))
)
def cancelJob(id: Ident, collective: Ident): F[JobCancelResult] = { def cancelJob(id: Ident, collective: UserTaskScope): F[JobCancelResult] = {
def remove(job: RJob): F[JobCancelResult] = def remove(job: RJob): F[JobCancelResult] =
for { for {
n <- store.transact(RJob.delete(job.id)) n <- store.transact(RJob.delete(job.id))
@ -99,7 +114,9 @@ object OJob {
} }
(for { (for {
job <- OptionT(store.transact(RJob.findByIdAndGroup(id, collective))) job <- OptionT(
store.transact(RJob.findByIdAndGroup(id, scopeToGroup(collective)))
)
result <- OptionT.liftF( result <- OptionT.liftF(
if (job.isInProgress) tryCancel(job) if (job.isInProgress) tryCancel(job)
else remove(job) else remove(job)
@ -108,7 +125,7 @@ object OJob {
.getOrElse(JobCancelResult.jobNotFound) .getOrElse(JobCancelResult.jobNotFound)
} }
def getUnfinishedJobCount(collective: Ident): F[Int] = def getUnfinishedJobCount(collective: UserTaskScope): F[Int] =
store.transact(RJob.getUnfinishedCount(collective)) store.transact(RJob.getUnfinishedCount(scopeToGroup(collective)))
}) })
} }
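To illustrate the scope change, a brief hypothetical caller sketch; the scope value is assumed to be constructed elsewhere (for instance UserTaskScope.system for system jobs), and scopeToGroup falls back to the system task group when no collective is present:

import cats.effect.IO
import docspell.backend.ops.OJob
import docspell.common._
import docspell.scheduler.usertask.UserTaskScope

// Cancel a job and report how many jobs are still unfinished for that scope.
def cancelAndCount(
    ojob: OJob[IO],              // assumed: provided by the backend wiring
    scope: UserTaskScope,
    jobId: Ident
): IO[Int] =
  for {
    _ <- ojob.cancelJob(jobId, scope)
    n <- ojob.getUnfinishedJobCount(scope)
  } yield n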

View File

@ -21,33 +21,38 @@ import emil._
trait OMail[F[_]] { trait OMail[F[_]] {
def getSmtpSettings(accId: AccountId, nameQ: Option[String]): F[Vector[RUserEmail]] def getSmtpSettings(userId: Ident, nameQ: Option[String]): F[Vector[RUserEmail]]
def findSmtpSettings(accId: AccountId, name: Ident): OptionT[F, RUserEmail] def findSmtpSettings(userId: Ident, name: Ident): OptionT[F, RUserEmail]
def createSmtpSettings(accId: AccountId, data: SmtpSettings): F[AddResult] def createSmtpSettings(userId: Ident, data: SmtpSettings): F[AddResult]
def updateSmtpSettings(accId: AccountId, name: Ident, data: OMail.SmtpSettings): F[Int] def updateSmtpSettings(userId: Ident, name: Ident, data: OMail.SmtpSettings): F[Int]
def deleteSmtpSettings(accId: AccountId, name: Ident): F[Int] def deleteSmtpSettings(userId: Ident, name: Ident): F[Int]
def getImapSettings(accId: AccountId, nameQ: Option[String]): F[Vector[RUserImap]] def getImapSettings(userId: Ident, nameQ: Option[String]): F[Vector[RUserImap]]
def findImapSettings(accId: AccountId, name: Ident): OptionT[F, RUserImap] def findImapSettings(userId: Ident, name: Ident): OptionT[F, RUserImap]
def createImapSettings(accId: AccountId, data: ImapSettings): F[AddResult] def createImapSettings(userId: Ident, data: ImapSettings): F[AddResult]
def updateImapSettings(accId: AccountId, name: Ident, data: OMail.ImapSettings): F[Int] def updateImapSettings(userId: Ident, name: Ident, data: OMail.ImapSettings): F[Int]
def deleteImapSettings(accId: AccountId, name: Ident): F[Int] def deleteImapSettings(userId: Ident, name: Ident): F[Int]
def sendMail(accId: AccountId, name: Ident, m: ItemMail): F[SendResult] def sendMail(
userId: Ident,
collectiveId: CollectiveId,
name: Ident,
m: ItemMail
): F[SendResult]
def getSentMailsForItem(accId: AccountId, itemId: Ident): F[Vector[Sent]] def getSentMailsForItem(collectiveId: CollectiveId, itemId: Ident): F[Vector[Sent]]
def getSentMail(accId: AccountId, mailId: Ident): OptionT[F, Sent] def getSentMail(collectiveId: CollectiveId, mailId: Ident): OptionT[F, Sent]
def deleteSentMail(accId: AccountId, mailId: Ident): F[Int] def deleteSentMail(collectiveId: CollectiveId, mailId: Ident): F[Int]
} }
object OMail { object OMail {
@ -124,9 +129,9 @@ object OMail {
mailReplyTo: Option[MailAddress] mailReplyTo: Option[MailAddress]
) { ) {
def toRecord(accId: AccountId) = def toRecord(userId: Ident) =
RUserEmail.fromAccount( RUserEmail.fromUser(
accId, userId,
name, name,
smtpHost, smtpHost,
smtpPort, smtpPort,
@ -150,9 +155,9 @@ object OMail {
imapOAuth2: Boolean imapOAuth2: Boolean
) { ) {
def toRecord(accId: AccountId) = def toRecord(userId: Ident) =
RUserImap.fromAccount( RUserImap.fromUser(
accId, userId,
name, name,
imapHost, imapHost,
imapPort, imapPort,
@ -167,74 +172,79 @@ object OMail {
def apply[F[_]: Async](store: Store[F], emil: Emil[F]): Resource[F, OMail[F]] = def apply[F[_]: Async](store: Store[F], emil: Emil[F]): Resource[F, OMail[F]] =
Resource.pure[F, OMail[F]](new OMail[F] { Resource.pure[F, OMail[F]](new OMail[F] {
def getSmtpSettings( def getSmtpSettings(
accId: AccountId, userId: Ident,
nameQ: Option[String] nameQ: Option[String]
): F[Vector[RUserEmail]] = ): F[Vector[RUserEmail]] =
store.transact(RUserEmail.findByAccount(accId, nameQ)) store.transact(RUserEmail.findByAccount(userId, nameQ))
def findSmtpSettings(accId: AccountId, name: Ident): OptionT[F, RUserEmail] = def findSmtpSettings(userId: Ident, name: Ident): OptionT[F, RUserEmail] =
OptionT(store.transact(RUserEmail.getByName(accId, name))) OptionT(store.transact(RUserEmail.getByName(userId, name)))
def createSmtpSettings(accId: AccountId, s: SmtpSettings): F[AddResult] = def createSmtpSettings(userId: Ident, s: SmtpSettings): F[AddResult] =
(for { (for {
ru <- OptionT(store.transact(s.toRecord(accId).value)) ru <- OptionT(store.transact(s.toRecord(userId).value))
ins = RUserEmail.insert(ru) ins = RUserEmail.insert(ru)
exists = RUserEmail.exists(ru.uid, ru.name) exists = RUserEmail.exists(ru.uid, ru.name)
res <- OptionT.liftF(store.add(ins, exists)) res <- OptionT.liftF(store.add(ins, exists))
} yield res).getOrElse(AddResult.Failure(new Exception("User not found"))) } yield res).getOrElse(AddResult.Failure(new Exception("User not found")))
def updateSmtpSettings( def updateSmtpSettings(
accId: AccountId, userId: Ident,
name: Ident, name: Ident,
data: SmtpSettings data: SmtpSettings
): F[Int] = { ): F[Int] = {
val op = for { val op = for {
um <- OptionT(RUserEmail.getByName(accId, name)) um <- OptionT(RUserEmail.getByName(userId, name))
ru <- data.toRecord(accId) ru <- data.toRecord(userId)
n <- OptionT.liftF(RUserEmail.update(um.id, ru)) n <- OptionT.liftF(RUserEmail.update(um.id, ru))
} yield n } yield n
store.transact(op.value).map(_.getOrElse(0)) store.transact(op.value).map(_.getOrElse(0))
} }
def deleteSmtpSettings(accId: AccountId, name: Ident): F[Int] = def deleteSmtpSettings(userId: Ident, name: Ident): F[Int] =
store.transact(RUserEmail.delete(accId, name)) store.transact(RUserEmail.delete(userId, name))
def getImapSettings(accId: AccountId, nameQ: Option[String]): F[Vector[RUserImap]] = def getImapSettings(userId: Ident, nameQ: Option[String]): F[Vector[RUserImap]] =
store.transact(RUserImap.findByAccount(accId, nameQ)) store.transact(RUserImap.findByAccount(userId, nameQ))
def findImapSettings(accId: AccountId, name: Ident): OptionT[F, RUserImap] = def findImapSettings(userId: Ident, name: Ident): OptionT[F, RUserImap] =
OptionT(store.transact(RUserImap.getByName(accId, name))) OptionT(store.transact(RUserImap.getByName(userId, name)))
def createImapSettings(accId: AccountId, data: ImapSettings): F[AddResult] = def createImapSettings(userId: Ident, data: ImapSettings): F[AddResult] =
(for { (for {
ru <- OptionT(store.transact(data.toRecord(accId).value)) ru <- OptionT(store.transact(data.toRecord(userId).value))
ins = RUserImap.insert(ru) ins = RUserImap.insert(ru)
exists = RUserImap.exists(ru.uid, ru.name) exists = RUserImap.exists(ru.uid, ru.name)
res <- OptionT.liftF(store.add(ins, exists)) res <- OptionT.liftF(store.add(ins, exists))
} yield res).getOrElse(AddResult.Failure(new Exception("User not found"))) } yield res).getOrElse(AddResult.Failure(new Exception("User not found")))
def updateImapSettings( def updateImapSettings(
accId: AccountId, userId: Ident,
name: Ident, name: Ident,
data: OMail.ImapSettings data: OMail.ImapSettings
): F[Int] = { ): F[Int] = {
val op = for { val op = for {
um <- OptionT(RUserImap.getByName(accId, name)) um <- OptionT(RUserImap.getByName(userId, name))
ru <- data.toRecord(accId) ru <- data.toRecord(userId)
n <- OptionT.liftF(RUserImap.update(um.id, ru)) n <- OptionT.liftF(RUserImap.update(um.id, ru))
} yield n } yield n
store.transact(op.value).map(_.getOrElse(0)) store.transact(op.value).map(_.getOrElse(0))
} }
def deleteImapSettings(accId: AccountId, name: Ident): F[Int] = def deleteImapSettings(userId: Ident, name: Ident): F[Int] =
store.transact(RUserImap.delete(accId, name)) store.transact(RUserImap.delete(userId, name))
def sendMail(accId: AccountId, name: Ident, m: ItemMail): F[SendResult] = { def sendMail(
userId: Ident,
collectiveId: CollectiveId,
name: Ident,
m: ItemMail
): F[SendResult] = {
val getSmtpSettings: OptionT[F, RUserEmail] = val getSmtpSettings: OptionT[F, RUserEmail] =
OptionT(store.transact(RUserEmail.getByName(accId, name))) OptionT(store.transact(RUserEmail.getByName(userId, name)))
def createMail(sett: RUserEmail): OptionT[F, Mail[F]] = { def createMail(sett: RUserEmail): OptionT[F, Mail[F]] = {
import _root_.emil.builder._ import _root_.emil.builder._
@ -243,7 +253,7 @@ object OMail {
_ <- OptionT.liftF(store.transact(RItem.existsById(m.item))).filter(identity) _ <- OptionT.liftF(store.transact(RItem.existsById(m.item))).filter(identity)
ras <- OptionT.liftF( ras <- OptionT.liftF(
store.transact( store.transact(
RAttachment.findByItemAndCollectiveWithMeta(m.item, accId.collective) RAttachment.findByItemAndCollectiveWithMeta(m.item, collectiveId)
) )
) )
} yield { } yield {
@ -275,7 +285,7 @@ object OMail {
val save = for { val save = for {
data <- RSentMail.forItem( data <- RSentMail.forItem(
m.item, m.item,
accId, userId,
msgId, msgId,
cfg.mailFrom, cfg.mailFrom,
name, name,
@ -307,17 +317,20 @@ object OMail {
} yield conv).getOrElse(SendResult.NotFound) } yield conv).getOrElse(SendResult.NotFound)
} }
def getSentMailsForItem(accId: AccountId, itemId: Ident): F[Vector[Sent]] = def getSentMailsForItem(
collectiveId: CollectiveId,
itemId: Ident
): F[Vector[Sent]] =
store store
.transact(QMails.findMails(accId.collective, itemId)) .transact(QMails.findMails(collectiveId, itemId))
.map(_.map(t => Sent.create(t._1, t._2))) .map(_.map(t => Sent.create(t._1, t._2)))
def getSentMail(accId: AccountId, mailId: Ident): OptionT[F, Sent] = def getSentMail(collectiveId: CollectiveId, mailId: Ident): OptionT[F, Sent] =
OptionT(store.transact(QMails.findMail(accId.collective, mailId))).map(t => OptionT(store.transact(QMails.findMail(collectiveId, mailId))).map(t =>
Sent.create(t._1, t._2) Sent.create(t._1, t._2)
) )
def deleteSentMail(accId: AccountId, mailId: Ident): F[Int] = def deleteSentMail(collectiveId: CollectiveId, mailId: Ident): F[Int] =
store.transact(QMails.delete(accId.collective, mailId)) store.transact(QMails.delete(collectiveId, mailId))
}) })
} }
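A rough usage sketch of the split user/collective parameters; the OMail.ItemMail path is an assumption based on the surrounding code:

import cats.effect.IO
import docspell.backend.ops.OMail
import docspell.common._

// Send a mail for an item using the user's SMTP connection, then list the
// mails that were sent for that item within the collective.
def sendAndList(
    omail: OMail[IO],            // assumed: provided by the backend wiring
    userId: Ident,
    cid: CollectiveId,
    connection: Ident,           // name of the user's SMTP connection
    mail: OMail.ItemMail         // type path assumed
) =
  for {
    res  <- omail.sendMail(userId, cid, connection, mail)
    sent <- omail.getSentMailsForItem(cid, mail.item)
  } yield (res, sent)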

View File

@ -39,35 +39,35 @@ trait ONotification[F[_]] {
def findNotificationChannel( def findNotificationChannel(
ref: ChannelRef, ref: ChannelRef,
account: AccountId userId: Ident
): F[Vector[NotificationChannel]] ): F[Vector[NotificationChannel]]
def listChannels(account: AccountId): F[Vector[Channel]] def listChannels(userId: Ident): F[Vector[Channel]]
def deleteChannel(id: Ident, account: AccountId): F[UpdateResult] def deleteChannel(id: Ident, userId: Ident): F[UpdateResult]
def createChannel(channel: Channel, account: AccountId): F[AddResult] def createChannel(channel: Channel, userId: Ident): F[AddResult]
def updateChannel(channel: Channel, account: AccountId): F[UpdateResult] def updateChannel(channel: Channel, userId: Ident): F[UpdateResult]
def listHooks(account: AccountId): F[Vector[Hook]] def listHooks(userId: Ident): F[Vector[Hook]]
def deleteHook(id: Ident, account: AccountId): F[UpdateResult] def deleteHook(id: Ident, userId: Ident): F[UpdateResult]
def createHook(hook: Hook, account: AccountId): F[AddResult] def createHook(hook: Hook, userId: Ident): F[AddResult]
def updateHook(hook: Hook, account: AccountId): F[UpdateResult] def updateHook(hook: Hook, userId: Ident): F[UpdateResult]
def sampleEvent( def sampleEvent(
evt: EventType, evt: EventType,
account: AccountId, account: AccountInfo,
baseUrl: Option[LenientUri] baseUrl: Option[LenientUri]
): F[EventContext] ): F[EventContext]
def sendSampleEvent( def sendSampleEvent(
evt: EventType, evt: EventType,
channel: Nel[ChannelRef], channel: Nel[ChannelRef],
account: AccountId, account: AccountInfo,
baseUrl: Option[LenientUri] baseUrl: Option[LenientUri]
): F[ONotification.SendTestResult] ): F[ONotification.SendTestResult]
} }
@ -81,13 +81,6 @@ object ONotification {
Resource.pure[F, ONotification[F]](new ONotification[F] { Resource.pure[F, ONotification[F]](new ONotification[F] {
val log = docspell.logging.getLogger[F] val log = docspell.logging.getLogger[F]
def withUserId[A](
account: AccountId
)(f: Ident => F[UpdateResult]): F[UpdateResult] =
OptionT(store.transact(RUser.findIdByAccount(account)))
.semiflatMap(f)
.getOrElse(UpdateResult.notFound)
def offerEvents(ev: Iterable[Event]): F[Unit] = def offerEvents(ev: Iterable[Event]): F[Unit] =
ev.toList.traverse(notMod.offer).as(()) ev.toList.traverse(notMod.offer).as(())
@ -100,7 +93,7 @@ object ONotification {
def sampleEvent( def sampleEvent(
evt: EventType, evt: EventType,
account: AccountId, account: AccountInfo,
baseUrl: Option[LenientUri] baseUrl: Option[LenientUri]
): F[EventContext] = ): F[EventContext] =
Event Event
@ -110,14 +103,14 @@ object ONotification {
def sendSampleEvent( def sendSampleEvent(
evt: EventType, evt: EventType,
channels: Nel[ChannelRef], channels: Nel[ChannelRef],
account: AccountId, account: AccountInfo,
baseUrl: Option[LenientUri] baseUrl: Option[LenientUri]
): F[SendTestResult] = ): F[SendTestResult] =
(for { (for {
ev <- sampleEvent(evt, account, baseUrl) ev <- sampleEvent(evt, account, baseUrl)
logbuf <- Logger.buffer() logbuf <- Logger.buffer()
ch <- channels.toList.toVector.flatTraverse( ch <- channels.toList.toVector.flatTraverse(
findNotificationChannel(_, account) findNotificationChannel(_, account.userId)
) )
_ <- notMod.send(logbuf._2.andThen(log), ev, ch) _ <- notMod.send(logbuf._2.andThen(log), ev, ch)
logs <- logbuf._1.get logs <- logbuf._1.get
@ -131,54 +124,51 @@ object ONotification {
SendTestResult(false, Vector(ev)) SendTestResult(false, Vector(ev))
} }
def listChannels(account: AccountId): F[Vector[Channel]] = def listChannels(userId: Ident): F[Vector[Channel]] =
store store
.transact(RNotificationChannel.getByAccount(account)) .transact(RNotificationChannel.getByAccount(userId))
.map(_.map(ChannelConv.makeChannel)) .map(_.map(ChannelConv.makeChannel))
def deleteChannel(id: Ident, account: AccountId): F[UpdateResult] = def deleteChannel(id: Ident, userId: Ident): F[UpdateResult] =
UpdateResult UpdateResult
.fromUpdate( .fromUpdate(
store.transact(RNotificationChannel.deleteByAccount(id, account)) store.transact(RNotificationChannel.deleteByAccount(id, userId))
) )
.flatTap(_ => log.info(s"Deleted channel ${id.id} for ${account.asString}")) .flatTap(_ => log.info(s"Deleted channel ${id.id} for ${userId.id}"))
def createChannel(channel: Channel, account: AccountId): F[AddResult] = def createChannel(channel: Channel, userId: Ident): F[AddResult] =
(for { (for {
newId <- OptionT.liftF(Ident.randomId[F]) newId <- OptionT.liftF(Ident.randomId[F])
userId <- OptionT(store.transact(RUser.findIdByAccount(account)))
r <- ChannelConv.makeRecord[F](store, channel, newId, userId) r <- ChannelConv.makeRecord[F](store, channel, newId, userId)
_ <- OptionT.liftF(store.transact(RNotificationChannel.insert(r))) _ <- OptionT.liftF(store.transact(RNotificationChannel.insert(r)))
_ <- OptionT.liftF(log.debug(s"Created channel $r for $account")) _ <- OptionT.liftF(log.debug(s"Created channel $r for ${userId.id}"))
} yield AddResult.Success) } yield AddResult.Success)
.getOrElse(AddResult.failure(new Exception("User not found!"))) .getOrElse(AddResult.failure(new Exception("User not found!")))
def updateChannel(channel: Channel, account: AccountId): F[UpdateResult] = def updateChannel(channel: Channel, userId: Ident): F[UpdateResult] =
(for { (for {
userId <- OptionT(store.transact(RUser.findIdByAccount(account)))
r <- ChannelConv.makeRecord[F](store, channel, channel.id, userId) r <- ChannelConv.makeRecord[F](store, channel, channel.id, userId)
n <- OptionT.liftF(store.transact(RNotificationChannel.update(r))) n <- OptionT.liftF(store.transact(RNotificationChannel.update(r)))
} yield UpdateResult.fromUpdateRows(n)).getOrElse(UpdateResult.notFound) } yield UpdateResult.fromUpdateRows(n)).getOrElse(UpdateResult.notFound)
def listHooks(account: AccountId): F[Vector[Hook]] = def listHooks(userId: Ident): F[Vector[Hook]] =
store.transact(for { store.transact(for {
list <- RNotificationHook.findAllByAccount(account) list <- RNotificationHook.findAllByAccount(userId)
res <- list.traverse((Hook.fromRecord _).tupled) res <- list.traverse((Hook.fromRecord _).tupled)
} yield res) } yield res)
def deleteHook(id: Ident, account: AccountId): F[UpdateResult] = def deleteHook(id: Ident, userId: Ident): F[UpdateResult] =
UpdateResult UpdateResult
.fromUpdate(store.transact(RNotificationHook.deleteByAccount(id, account))) .fromUpdate(store.transact(RNotificationHook.deleteByAccount(id, userId)))
def createHook(hook: Hook, account: AccountId): F[AddResult] = def createHook(hook: Hook, userId: Ident): F[AddResult] =
(for { (for {
_ <- OptionT.liftF(log.debug(s"Creating new notification hook: $hook")) _ <- OptionT.liftF(log.debug(s"Creating new notification hook: $hook"))
userId <- OptionT(store.transact(RUser.findIdByAccount(account)))
hr <- OptionT.liftF(Hook.makeRecord(userId, hook)) hr <- OptionT.liftF(Hook.makeRecord(userId, hook))
_ <- OptionT.liftF( _ <- OptionT.liftF(
store.transact( store.transact(
RNotificationHook.insert(hr) *> RNotificationHookChannel RNotificationHook.insert(hr) *> RNotificationHookChannel
.updateAll(hr.id, hook.channels.toList) .updateAll(hr.id, hook.channels)
) )
) )
_ <- OptionT.liftF( _ <- OptionT.liftF(
@ -187,13 +177,11 @@ object ONotification {
} yield AddResult.Success) } yield AddResult.Success)
.getOrElse(AddResult.failure(new Exception("User or channel not found!"))) .getOrElse(AddResult.failure(new Exception("User or channel not found!")))
def updateHook(hook: Hook, account: AccountId): F[UpdateResult] = { def updateHook(hook: Hook, userId: Ident): F[UpdateResult] = {
def withHook(f: RNotificationHook => F[UpdateResult]): F[UpdateResult] = def withHook(f: RNotificationHook => F[UpdateResult]): F[UpdateResult] =
withUserId(account)(userId =>
OptionT(store.transact(RNotificationHook.getById(hook.id, userId))) OptionT(store.transact(RNotificationHook.getById(hook.id, userId)))
.semiflatMap(f) .semiflatMap(f)
.getOrElse(UpdateResult.notFound) .getOrElse(UpdateResult.notFound)
)
def doUpdate(r: RNotificationHook): F[UpdateResult] = def doUpdate(r: RNotificationHook): F[UpdateResult] =
UpdateResult.fromUpdate(store.transact(for { UpdateResult.fromUpdate(store.transact(for {
@ -201,10 +189,7 @@ object ONotification {
r.id, r.id,
if (hook.allEvents) Nil else hook.events if (hook.allEvents) Nil else hook.events
) )
nc <- RNotificationHookChannel.updateAll( nc <- RNotificationHookChannel.updateAll(r.id, hook.channels)
r.id,
hook.channels.toList
)
nr <- RNotificationHook.update( nr <- RNotificationHook.update(
r.copy( r.copy(
enabled = hook.enabled, enabled = hook.enabled,
@ -230,10 +215,9 @@ object ONotification {
def findNotificationChannel( def findNotificationChannel(
ref: ChannelRef, ref: ChannelRef,
accountId: AccountId userId: Ident
): F[Vector[NotificationChannel]] = ): F[Vector[NotificationChannel]] =
(for { (for {
userId <- OptionT(store.transact(RUser.findIdByAccount(accountId)))
rec <- OptionT(store.transact(RNotificationChannel.getByRef(ref, userId))) rec <- OptionT(store.transact(RNotificationChannel.getByRef(ref, userId)))
ch <- OptionT.liftF(store.transact(QNotification.readChannel(rec))) ch <- OptionT.liftF(store.transact(QNotification.readChannel(rec)))
} yield ch).getOrElse(Vector.empty) } yield ch).getOrElse(Vector.empty)
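Since the user-id lookup moved to the callers, here is a minimal sketch of managing channels with an already resolved user id (names outside this diff are assumptions):

import cats.effect.IO
import docspell.backend.ops.ONotification
import docspell.common._

// Delete a channel by id and return the remaining channels of the user.
def dropChannel(
    onoti: ONotification[IO],    // assumed: provided by the backend wiring
    userId: Ident,
    channelId: Ident
) =
  for {
    res <- onoti.deleteChannel(channelId, userId)
    all <- onoti.listChannels(userId)
  } yield (res, all)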

View File

@ -18,14 +18,15 @@ import docspell.store.records._
trait OOrganization[F[_]] { trait OOrganization[F[_]] {
def findAllOrg( def findAllOrg(
account: AccountId, collectiveId: CollectiveId,
query: Option[String], query: Option[String],
order: OrganizationOrder order: OrganizationOrder
): F[Vector[OrgAndContacts]] ): F[Vector[OrgAndContacts]]
def findOrg(account: AccountId, orgId: Ident): F[Option[OrgAndContacts]]
def findOrg(collectiveId: CollectiveId, orgId: Ident): F[Option[OrgAndContacts]]
def findAllOrgRefs( def findAllOrgRefs(
account: AccountId, collectiveId: CollectiveId,
nameQuery: Option[String], nameQuery: Option[String],
order: OrganizationOrder order: OrganizationOrder
): F[Vector[IdRef]] ): F[Vector[IdRef]]
@ -35,15 +36,15 @@ trait OOrganization[F[_]] {
def updateOrg(s: OrgAndContacts): F[AddResult] def updateOrg(s: OrgAndContacts): F[AddResult]
def findAllPerson( def findAllPerson(
account: AccountId, collectiveId: CollectiveId,
query: Option[String], query: Option[String],
order: PersonOrder order: PersonOrder
): F[Vector[PersonAndContacts]] ): F[Vector[PersonAndContacts]]
def findPerson(account: AccountId, persId: Ident): F[Option[PersonAndContacts]] def findPerson(collectiveId: CollectiveId, persId: Ident): F[Option[PersonAndContacts]]
def findAllPersonRefs( def findAllPersonRefs(
account: AccountId, collectiveId: CollectiveId,
nameQuery: Option[String], nameQuery: Option[String],
order: PersonOrder order: PersonOrder
): F[Vector[IdRef]] ): F[Vector[IdRef]]
@ -54,9 +55,9 @@ trait OOrganization[F[_]] {
/** Update a person with their contacts. The additional organization is ignored. */ /** Update a person with their contacts. The additional organization is ignored. */
def updatePerson(s: PersonAndContacts): F[AddResult] def updatePerson(s: PersonAndContacts): F[AddResult]
def deleteOrg(orgId: Ident, collective: Ident): F[AddResult] def deleteOrg(orgId: Ident, collective: CollectiveId): F[AddResult]
def deletePerson(personId: Ident, collective: Ident): F[AddResult] def deletePerson(personId: Ident, collective: CollectiveId): F[AddResult]
} }
object OOrganization { object OOrganization {
@ -134,32 +135,32 @@ object OOrganization {
Resource.pure[F, OOrganization[F]](new OOrganization[F] { Resource.pure[F, OOrganization[F]](new OOrganization[F] {
def findAllOrg( def findAllOrg(
account: AccountId, collectiveId: CollectiveId,
query: Option[String], query: Option[String],
order: OrganizationOrder order: OrganizationOrder
): F[Vector[OrgAndContacts]] = ): F[Vector[OrgAndContacts]] =
store store
.transact( .transact(
QOrganization QOrganization
.findOrgAndContact(account.collective, query, OrganizationOrder(order)) .findOrgAndContact(collectiveId, query, OrganizationOrder(order))
) )
.map { case (org, cont) => OrgAndContacts(org, cont) } .map { case (org, cont) => OrgAndContacts(org, cont) }
.compile .compile
.toVector .toVector
def findOrg(account: AccountId, orgId: Ident): F[Option[OrgAndContacts]] = def findOrg(collectiveId: CollectiveId, orgId: Ident): F[Option[OrgAndContacts]] =
store store
.transact(QOrganization.getOrgAndContact(account.collective, orgId)) .transact(QOrganization.getOrgAndContact(collectiveId, orgId))
.map(_.map { case (org, cont) => OrgAndContacts(org, cont) }) .map(_.map { case (org, cont) => OrgAndContacts(org, cont) })
def findAllOrgRefs( def findAllOrgRefs(
account: AccountId, collectiveId: CollectiveId,
nameQuery: Option[String], nameQuery: Option[String],
order: OrganizationOrder order: OrganizationOrder
): F[Vector[IdRef]] = ): F[Vector[IdRef]] =
store.transact( store.transact(
ROrganization.findAllRef( ROrganization.findAllRef(
account.collective, collectiveId,
nameQuery, nameQuery,
OrganizationOrder(order) OrganizationOrder(order)
) )
@ -172,31 +173,34 @@ object OOrganization {
QOrganization.updateOrg(s.org, s.contacts, s.org.cid)(store) QOrganization.updateOrg(s.org, s.contacts, s.org.cid)(store)
def findAllPerson( def findAllPerson(
account: AccountId, collectiveId: CollectiveId,
query: Option[String], query: Option[String],
order: PersonOrder order: PersonOrder
): F[Vector[PersonAndContacts]] = ): F[Vector[PersonAndContacts]] =
store store
.transact( .transact(
QOrganization QOrganization
.findPersonAndContact(account.collective, query, PersonOrder(order)) .findPersonAndContact(collectiveId, query, PersonOrder(order))
) )
.map { case (person, org, cont) => PersonAndContacts(person, org, cont) } .map { case (person, org, cont) => PersonAndContacts(person, org, cont) }
.compile .compile
.toVector .toVector
def findPerson(account: AccountId, persId: Ident): F[Option[PersonAndContacts]] = def findPerson(
collectiveId: CollectiveId,
persId: Ident
): F[Option[PersonAndContacts]] =
store store
.transact(QOrganization.getPersonAndContact(account.collective, persId)) .transact(QOrganization.getPersonAndContact(collectiveId, persId))
.map(_.map { case (pers, org, cont) => PersonAndContacts(pers, org, cont) }) .map(_.map { case (pers, org, cont) => PersonAndContacts(pers, org, cont) })
def findAllPersonRefs( def findAllPersonRefs(
account: AccountId, collectiveId: CollectiveId,
nameQuery: Option[String], nameQuery: Option[String],
order: PersonOrder order: PersonOrder
): F[Vector[IdRef]] = ): F[Vector[IdRef]] =
store.transact( store.transact(
RPerson.findAllRef(account.collective, nameQuery, PersonOrder.nameOnly(order)) RPerson.findAllRef(collectiveId, nameQuery, PersonOrder.nameOnly(order))
) )
def addPerson(s: PersonAndContacts): F[AddResult] = def addPerson(s: PersonAndContacts): F[AddResult] =
@ -205,13 +209,13 @@ object OOrganization {
def updatePerson(s: PersonAndContacts): F[AddResult] = def updatePerson(s: PersonAndContacts): F[AddResult] =
QOrganization.updatePerson(s.person, s.contacts, s.person.cid)(store) QOrganization.updatePerson(s.person, s.contacts, s.person.cid)(store)
def deleteOrg(orgId: Ident, collective: Ident): F[AddResult] = def deleteOrg(orgId: Ident, collective: CollectiveId): F[AddResult] =
store store
.transact(QOrganization.deleteOrg(orgId, collective)) .transact(QOrganization.deleteOrg(orgId, collective))
.attempt .attempt
.map(AddResult.fromUpdate) .map(AddResult.fromUpdate)
def deletePerson(personId: Ident, collective: Ident): F[AddResult] = def deletePerson(personId: Ident, collective: CollectiveId): F[AddResult] =
store store
.transact(QOrganization.deletePerson(personId, collective)) .transact(QOrganization.deletePerson(personId, collective))
.attempt .attempt
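A short hypothetical sketch of the collective-scoped lookups, assuming a wired OOrganization[IO]:

import cats.effect.IO
import docspell.backend.ops.OOrganization
import docspell.common._

// Resolve an organization and a person directly by CollectiveId.
def lookupOrgAndPerson(
    oorg: OOrganization[IO],     // assumed: provided by the backend wiring
    cid: CollectiveId,
    orgId: Ident,
    personId: Ident
) =
  for {
    org  <- oorg.findOrg(cid, orgId)
    pers <- oorg.findPerson(cid, personId)
  } yield (org, pers)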

View File

@ -6,7 +6,6 @@
package docspell.backend.ops package docspell.backend.ops
import cats.data.OptionT
import cats.effect._ import cats.effect._
import cats.implicits._ import cats.implicits._
@ -19,19 +18,19 @@ import docspell.store.records._
trait OQueryBookmarks[F[_]] { trait OQueryBookmarks[F[_]] {
def getAll(account: AccountId): F[Vector[OQueryBookmarks.Bookmark]] def getAll(account: AccountInfo): F[Vector[OQueryBookmarks.Bookmark]]
def findOne(account: AccountId, nameOrId: String): F[Option[OQueryBookmarks.Bookmark]] def findOne(account: AccountInfo, nameOrId: String): F[Option[OQueryBookmarks.Bookmark]]
def create(account: AccountId, bookmark: OQueryBookmarks.NewBookmark): F[AddResult] def create(account: AccountInfo, bookmark: OQueryBookmarks.NewBookmark): F[AddResult]
def update( def update(
account: AccountId, account: AccountInfo,
id: Ident, id: Ident,
bookmark: OQueryBookmarks.NewBookmark bookmark: OQueryBookmarks.NewBookmark
): F[UpdateResult] ): F[UpdateResult]
def delete(account: AccountId, bookmark: Ident): F[Unit] def delete(account: AccountInfo, bookmark: Ident): F[Unit]
} }
object OQueryBookmarks { object OQueryBookmarks {
@ -53,39 +52,43 @@ object OQueryBookmarks {
def apply[F[_]: Sync](store: Store[F]): Resource[F, OQueryBookmarks[F]] = def apply[F[_]: Sync](store: Store[F]): Resource[F, OQueryBookmarks[F]] =
Resource.pure(new OQueryBookmarks[F] { Resource.pure(new OQueryBookmarks[F] {
def getAll(account: AccountId): F[Vector[Bookmark]] = def getAll(account: AccountInfo): F[Vector[Bookmark]] =
store store
.transact(RQueryBookmark.allForUser(account)) .transact(RQueryBookmark.allForUser(account.collectiveId, account.userId))
.map(_.map(convert.toModel)) .map(_.map(convert.toModel))
def findOne( def findOne(
account: AccountId, account: AccountInfo,
nameOrId: String nameOrId: String
): F[Option[OQueryBookmarks.Bookmark]] = ): F[Option[OQueryBookmarks.Bookmark]] =
store store
.transact(RQueryBookmark.findByNameOrId(account, nameOrId)) .transact(
RQueryBookmark.findByNameOrId(account.collectiveId, account.userId, nameOrId)
)
.map(_.map(convert.toModel)) .map(_.map(convert.toModel))
def create(account: AccountId, b: NewBookmark): F[AddResult] = { def create(account: AccountInfo, b: NewBookmark): F[AddResult] = {
val uid = if (b.personal) account.userId.some else None
val record = val record =
RQueryBookmark.createNew(account, b.name, b.label, b.query, b.personal) RQueryBookmark.createNew(
store.transact(RQueryBookmark.insertIfNotExists(account, record)) account.collectiveId,
uid,
b.name,
b.label,
b.query
)
store.transact(
RQueryBookmark.insertIfNotExists(account.collectiveId, account.userId, record)
)
} }
def update(account: AccountId, id: Ident, b: NewBookmark): F[UpdateResult] = def update(acc: AccountInfo, id: Ident, b: NewBookmark): F[UpdateResult] =
UpdateResult.fromUpdate( UpdateResult.fromUpdate(
store.transact { store.transact(RQueryBookmark.update(convert.toRecord(acc, id, b)))
(for {
userId <- OptionT(RUser.findIdByAccount(account))
n <- OptionT.liftF(
RQueryBookmark.update(convert.toRecord(account, id, userId, b))
)
} yield n).getOrElse(0)
}
) )
def delete(account: AccountId, bookmark: Ident): F[Unit] = def delete(account: AccountInfo, bookmark: Ident): F[Unit] =
store.transact(RQueryBookmark.deleteById(account.collective, bookmark)).as(()) store.transact(RQueryBookmark.deleteById(account.collectiveId, bookmark)).as(())
}) })
private object convert { private object convert {
@ -94,17 +97,16 @@ object OQueryBookmarks {
Bookmark(r.id, r.name, r.label, r.query, r.isPersonal, r.created) Bookmark(r.id, r.name, r.label, r.query, r.isPersonal, r.created)
def toRecord( def toRecord(
account: AccountId, account: AccountInfo,
id: Ident, id: Ident,
userId: Ident,
b: NewBookmark b: NewBookmark
): RQueryBookmark = ): RQueryBookmark =
RQueryBookmark( RQueryBookmark(
id, id,
b.name, b.name,
b.label, b.label,
if (b.personal) userId.some else None, if (b.personal) account.userId.some else None,
account.collective, account.collectiveId,
b.query, b.query,
Timestamp.Epoch Timestamp.Epoch
) )
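A tentative sketch of working with bookmarks through AccountInfo; the identifiers are assumed to be supplied by the caller:

import cats.effect.IO
import docspell.backend.ops.OQueryBookmarks
import docspell.common._

// Look up a bookmark by its id and apply an updated definition.
def replaceBookmark(
    ops: OQueryBookmarks[IO],    // assumed: provided by the backend wiring
    account: AccountInfo,
    id: Ident,
    updated: OQueryBookmarks.NewBookmark
) =
  for {
    existing <- ops.findOne(account, id.id)
    res      <- ops.update(account, id, updated)
  } yield (existing, res)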

View File

@ -28,16 +28,16 @@ import scodec.bits.ByteVector
trait OShare[F[_]] { trait OShare[F[_]] {
def findAll( def findAll(
collective: Ident, collective: CollectiveId,
ownerLogin: Option[Ident], ownerLogin: Option[Ident],
query: Option[String] query: Option[String]
): F[List[ShareData]] ): F[List[ShareData]]
def delete(id: Ident, collective: Ident): F[Boolean] def delete(id: Ident, collective: CollectiveId): F[Boolean]
def addNew(share: OShare.NewShare): F[OShare.ChangeResult] def addNew(share: OShare.NewShare): F[OShare.ChangeResult]
def findOne(id: Ident, collective: Ident): OptionT[F, ShareData] def findOne(id: Ident, collective: CollectiveId): OptionT[F, ShareData]
def update( def update(
id: Ident, id: Ident,
@ -71,7 +71,12 @@ trait OShare[F[_]] {
*/ */
def parseQuery(share: ShareQuery, qs: String): QueryParseResult def parseQuery(share: ShareQuery, qs: String): QueryParseResult
def sendMail(account: AccountId, connection: Ident, mail: ShareMail): F[SendResult] def sendMail(
collectiveId: CollectiveId,
userId: Ident,
connection: Ident,
mail: ShareMail
): F[SendResult]
} }
object OShare { object OShare {
@ -97,7 +102,7 @@ object OShare {
case object NotFound extends SendResult case object NotFound extends SendResult
} }
final case class ShareQuery(id: Ident, account: AccountId, query: ItemQuery) final case class ShareQuery(id: Ident, account: AccountInfo, query: ItemQuery)
sealed trait VerifyResult { sealed trait VerifyResult {
def toEither: Either[String, ShareToken] = def toEither: Either[String, ShareToken] =
@ -143,7 +148,7 @@ object OShare {
def queryWithFulltext: ChangeResult = QueryWithFulltext def queryWithFulltext: ChangeResult = QueryWithFulltext
} }
final case class ShareData(share: RShare, user: RUser) final case class ShareData(share: RShare, account: AccountInfo)
def apply[F[_]: Async]( def apply[F[_]: Async](
store: Store[F], store: Store[F],
@ -155,7 +160,7 @@ object OShare {
private[this] val logger = docspell.logging.getLogger[F] private[this] val logger = docspell.logging.getLogger[F]
def findAll( def findAll(
collective: Ident, collective: CollectiveId,
ownerLogin: Option[Ident], ownerLogin: Option[Ident],
query: Option[String] query: Option[String]
): F[List[ShareData]] = ): F[List[ShareData]] =
@ -163,7 +168,7 @@ object OShare {
.transact(RShare.findAllByCollective(collective, ownerLogin, query)) .transact(RShare.findAllByCollective(collective, ownerLogin, query))
.map(_.map(ShareData.tupled)) .map(_.map(ShareData.tupled))
def delete(id: Ident, collective: Ident): F[Boolean] = def delete(id: Ident, collective: CollectiveId): F[Boolean] =
store.transact(RShare.deleteByIdAndCid(id, collective)).map(_ > 0) store.transact(RShare.deleteByIdAndCid(id, collective)).map(_ > 0)
def addNew(share: NewShare): F[ChangeResult] = def addNew(share: NewShare): F[ChangeResult] =
@ -225,7 +230,7 @@ object OShare {
case _ => true case _ => true
} }
def findOne(id: Ident, collective: Ident): OptionT[F, ShareData] = def findOne(id: Ident, collective: CollectiveId): OptionT[F, ShareData] =
RShare RShare
.findOne(id, collective) .findOne(id, collective)
.mapK(store.transform) .mapK(store.transform)
@ -286,8 +291,8 @@ object OShare {
RShare RShare
.findCurrentActive(id) .findCurrentActive(id)
.mapK(store.transform) .mapK(store.transform)
.map { case (share, user) => .map { case (share, accInfo) =>
ShareQuery(share.id, user.accountId, share.query) ShareQuery(share.id, accInfo, share.query)
} }
def findAttachmentPreview( def findAttachmentPreview(
@ -298,7 +303,7 @@ object OShare {
sq <- findShareQuery(shareId) sq <- findShareQuery(shareId)
_ <- checkAttachment(sq, AttachId(attachId.id)) _ <- checkAttachment(sq, AttachId(attachId.id))
res <- OptionT( res <- OptionT(
itemSearch.findAttachmentPreview(attachId, sq.account.collective) itemSearch.findAttachmentPreview(attachId, sq.account.collectiveId)
) )
} yield res } yield res
@ -306,14 +311,14 @@ object OShare {
for { for {
sq <- findShareQuery(shareId) sq <- findShareQuery(shareId)
_ <- checkAttachment(sq, AttachId(attachId.id)) _ <- checkAttachment(sq, AttachId(attachId.id))
res <- OptionT(itemSearch.findAttachment(attachId, sq.account.collective)) res <- OptionT(itemSearch.findAttachment(attachId, sq.account.collectiveId))
} yield res } yield res
def findItem(itemId: Ident, shareId: Ident): OptionT[F, ItemData] = def findItem(itemId: Ident, shareId: Ident): OptionT[F, ItemData] =
for { for {
sq <- findShareQuery(shareId) sq <- findShareQuery(shareId)
_ <- checkAttachment(sq, Expr.itemIdEq(itemId.id)) _ <- checkAttachment(sq, Expr.itemIdEq(itemId.id))
res <- OptionT(itemSearch.findItem(itemId, sq.account.collective)) res <- OptionT(itemSearch.findItem(itemId, sq.account.collectiveId))
} yield res } yield res
/** Check whether the attachment with the given id is in the results of the given /** Check whether the attachment with the given id is in the results of the given
@ -343,12 +348,13 @@ object OShare {
} }
def sendMail( def sendMail(
account: AccountId, collectiveId: CollectiveId,
userId: Ident,
connection: Ident, connection: Ident,
mail: ShareMail mail: ShareMail
): F[SendResult] = { ): F[SendResult] = {
val getSmtpSettings: OptionT[F, RUserEmail] = val getSmtpSettings: OptionT[F, RUserEmail] =
OptionT(store.transact(RUserEmail.getByName(account, connection))) OptionT(store.transact(RUserEmail.getByName(userId, connection)))
def createMail(sett: RUserEmail): OptionT[F, Mail[F]] = { def createMail(sett: RUserEmail): OptionT[F, Mail[F]] = {
import _root_.emil.builder._ import _root_.emil.builder._
@ -366,20 +372,19 @@ object OShare {
) )
} }
def sendMail(cfg: MailConfig, mail: Mail[F]): F[Either[SendResult, String]] = def doSendMail(cfg: MailConfig, mail: Mail[F]): F[Either[SendResult, String]] =
emil(cfg).send(mail).map(_.head).attempt.map(_.left.map(SendResult.SendFailure)) emil(cfg).send(mail).map(_.head).attempt.map(_.left.map(SendResult.SendFailure))
(for { (for {
_ <- RShare _ <- RShare
.findCurrentActive(mail.shareId) .findCurrentActive(mail.shareId)
.filter(_._2.cid == account.collective) .filter(_._2.collectiveId == collectiveId)
.mapK(store.transform) .mapK(store.transform)
mailCfg <- getSmtpSettings mailCfg <- getSmtpSettings
mail <- createMail(mailCfg) mail <- createMail(mailCfg)
mid <- OptionT.liftF(sendMail(mailCfg.toMailConfig, mail)) mid <- OptionT.liftF(doSendMail(mailCfg.toMailConfig, mail))
conv = mid.fold(identity, id => SendResult.Success(id)) conv = mid.fold(identity, id => SendResult.Success(id))
} yield conv).getOrElse(SendResult.NotFound) } yield conv).getOrElse(SendResult.NotFound)
} }
} }
} }
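A sketch of sending a share mail with the new explicit ids, under the assumption that OShare.ShareMail is the companion type referenced above:

import cats.effect.IO
import docspell.backend.ops.OShare
import docspell.common._

// List the shares of a collective and send a notification mail for one of them.
def shareAndNotify(
    oshare: OShare[IO],          // assumed: provided by the backend wiring
    cid: CollectiveId,
    userId: Ident,
    connection: Ident,           // name of the SMTP connection to use
    mail: OShare.ShareMail       // type path assumed
) =
  for {
    shares <- oshare.findAll(cid, None, None)
    result <- oshare.sendMail(cid, userId, connection, mail)
  } yield (shares, result)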

View File

@ -9,7 +9,7 @@ package docspell.backend.ops
import cats.effect.{Async, Resource} import cats.effect.{Async, Resource}
import cats.implicits._ import cats.implicits._
import docspell.common.{AccountId, Ident} import docspell.common._
import docspell.store.UpdateResult import docspell.store.UpdateResult
import docspell.store.records.RSource import docspell.store.records.RSource
import docspell.store.records.SourceData import docspell.store.records.SourceData
@ -17,22 +17,22 @@ import docspell.store.{AddResult, Store}
trait OSource[F[_]] { trait OSource[F[_]] {
def findAll(account: AccountId): F[Vector[SourceData]] def findAll(collectiveId: CollectiveId): F[Vector[SourceData]]
def add(s: RSource, tags: List[String]): F[AddResult] def add(s: RSource, tags: List[String]): F[AddResult]
def update(s: RSource, tags: List[String]): F[AddResult] def update(s: RSource, tags: List[String]): F[AddResult]
def delete(id: Ident, collective: Ident): F[UpdateResult] def delete(id: Ident, collective: CollectiveId): F[UpdateResult]
} }
object OSource { object OSource {
def apply[F[_]: Async](store: Store[F]): Resource[F, OSource[F]] = def apply[F[_]: Async](store: Store[F]): Resource[F, OSource[F]] =
Resource.pure[F, OSource[F]](new OSource[F] { Resource.pure[F, OSource[F]](new OSource[F] {
def findAll(account: AccountId): F[Vector[SourceData]] = def findAll(collectiveId: CollectiveId): F[Vector[SourceData]] =
store store
.transact(SourceData.findAll(account.collective, _.abbrev)) .transact(SourceData.findAll(collectiveId, _.abbrev))
.compile .compile
.to(Vector) .to(Vector)
@ -52,7 +52,7 @@ object OSource {
store.add(insert, exists).map(_.fold(identity, _.withMsg(msg), identity)) store.add(insert, exists).map(_.fold(identity, _.withMsg(msg), identity))
} }
def delete(id: Ident, collective: Ident): F[UpdateResult] = def delete(id: Ident, collective: CollectiveId): F[UpdateResult] =
UpdateResult.fromUpdate(store.transact(SourceData.delete(id, collective))) UpdateResult.fromUpdate(store.transact(SourceData.delete(id, collective)))
}) })
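For illustration, a minimal sketch of the CollectiveId-based source operations:

import cats.effect.IO
import docspell.backend.ops.OSource
import docspell.common._

// Count the sources of a collective and delete one of them.
def pruneSource(
    osource: OSource[IO],        // assumed: provided by the backend wiring
    cid: CollectiveId,
    sourceId: Ident
) =
  for {
    all <- osource.findAll(cid)
    res <- osource.delete(sourceId, cid)
  } yield (all.size, res)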

View File

@ -10,7 +10,7 @@ import cats.data.NonEmptyList
import cats.effect.{Async, Resource} import cats.effect.{Async, Resource}
import cats.implicits._ import cats.implicits._
import docspell.common.{AccountId, Ident} import docspell.common._
import docspell.store.records.RTagSource import docspell.store.records.RTagSource
import docspell.store.records.{RTag, RTagItem} import docspell.store.records.{RTag, RTagItem}
import docspell.store.{AddResult, Store} import docspell.store.{AddResult, Store}
@ -18,7 +18,7 @@ import docspell.store.{AddResult, Store}
trait OTag[F[_]] { trait OTag[F[_]] {
def findAll( def findAll(
account: AccountId, collectiveId: CollectiveId,
query: Option[String], query: Option[String],
order: OTag.TagOrder order: OTag.TagOrder
): F[Vector[RTag]] ): F[Vector[RTag]]
@ -27,7 +27,7 @@ trait OTag[F[_]] {
def update(s: RTag): F[AddResult] def update(s: RTag): F[AddResult]
def delete(id: Ident, collective: Ident): F[AddResult] def delete(id: Ident, collective: CollectiveId): F[AddResult]
/** Load all tags given their ids. Ids that are not available are ignored. */ /** Load all tags given their ids. Ids that are not available are ignored. */
def loadAll(ids: List[Ident]): F[Vector[RTag]] def loadAll(ids: List[Ident]): F[Vector[RTag]]
@ -66,11 +66,11 @@ object OTag {
def apply[F[_]: Async](store: Store[F]): Resource[F, OTag[F]] = def apply[F[_]: Async](store: Store[F]): Resource[F, OTag[F]] =
Resource.pure[F, OTag[F]](new OTag[F] { Resource.pure[F, OTag[F]](new OTag[F] {
def findAll( def findAll(
account: AccountId, collectiveId: CollectiveId,
query: Option[String], query: Option[String],
order: TagOrder order: TagOrder
): F[Vector[RTag]] = ): F[Vector[RTag]] =
store.transact(RTag.findAll(account.collective, query, TagOrder(order))) store.transact(RTag.findAll(collectiveId, query, TagOrder(order)))
def add(t: RTag): F[AddResult] = { def add(t: RTag): F[AddResult] = {
def insert = RTag.insert(t) def insert = RTag.insert(t)
@ -88,7 +88,7 @@ object OTag {
store.add(insert, exists).map(_.fold(identity, _.withMsg(msg), identity)) store.add(insert, exists).map(_.fold(identity, _.withMsg(msg), identity))
} }
def delete(id: Ident, collective: Ident): F[AddResult] = { def delete(id: Ident, collective: CollectiveId): F[AddResult] = {
val io = for { val io = for {
optTag <- RTag.findByIdAndCollective(id, collective) optTag <- RTag.findByIdAndCollective(id, collective)
n0 <- optTag.traverse(t => RTagItem.deleteTag(t.tagId)) n0 <- optTag.traverse(t => RTagItem.deleteTag(t.tagId))
@ -99,7 +99,7 @@ object OTag {
} }
def loadAll(ids: List[Ident]): F[Vector[RTag]] = def loadAll(ids: List[Ident]): F[Vector[RTag]] =
if (ids.isEmpty) Vector.empty.pure[F] if (ids.isEmpty) Vector.empty[RTag].pure[F]
else store.transact(RTag.findAllById(ids)) else store.transact(RTag.findAllById(ids))
}) })
} }
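A brief hypothetical sketch combining the new delete signature with loadAll:

import cats.effect.IO
import docspell.backend.ops.OTag
import docspell.common._

// Delete a tag within its collective and verify it is gone; loadAll ignores
// ids that no longer exist.
def deleteTag(
    otag: OTag[IO],              // assumed: provided by the backend wiring
    cid: CollectiveId,
    tagId: Ident
) =
  for {
    res  <- otag.delete(tagId, cid)
    left <- otag.loadAll(List(tagId))
  } yield (res, left.isEmpty)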

View File

@ -6,29 +6,30 @@
package docspell.backend.ops package docspell.backend.ops
import cats.data.OptionT
import cats.effect._ import cats.effect._
import cats.implicits._ import cats.implicits._
import docspell.backend.ops.OTotp.{ConfirmResult, InitResult, OtpState} import docspell.backend.ops.OTotp.{ConfirmResult, InitResult, OtpState}
import docspell.common._ import docspell.common._
import docspell.store.records.{RTotp, RUser} import docspell.store.records.RTotp
import docspell.store.{AddResult, Store, UpdateResult} import docspell.store.{AddResult, Store, UpdateResult}
import docspell.totp.{Key, OnetimePassword, Totp} import docspell.totp.{Key, OnetimePassword, Totp}
trait OTotp[F[_]] { trait OTotp[F[_]] {
/** Return whether TOTP is enabled for this account or not. */ /** Return whether TOTP is enabled for this account or not. */
def state(accountId: AccountId): F[OtpState] def state(accountId: AccountInfo): F[OtpState]
/** Initializes TOTP by generating a secret and storing it in the database. TOTP is /** Initializes TOTP by generating a secret and storing it in the database. TOTP is
* still disabled; it must be confirmed in order to be active. * still disabled; it must be confirmed in order to be active.
*/ */
def initialize(accountId: AccountId): F[InitResult] def initialize(accountId: AccountInfo): F[InitResult]
/** Confirms and finishes initialization. TOTP is active after this for the given /** Confirms and finishes initialization. TOTP is active after this for the given
* account. * account.
*/ */
def confirmInit(accountId: AccountId, otp: OnetimePassword): F[ConfirmResult] def confirmInit(accountId: AccountInfo, otp: OnetimePassword): F[ConfirmResult]
/** Disables TOTP and removes the shared secret. If an otp is specified, it must be /** Disables TOTP and removes the shared secret. If an otp is specified, it must be
* valid. * valid.
@ -57,7 +58,7 @@ object OTotp {
sealed trait InitResult sealed trait InitResult
object InitResult { object InitResult {
final case class Success(accountId: AccountId, key: Key) extends InitResult { final case class Success(accountId: AccountInfo, key: Key) extends InitResult {
def authenticatorUrl(issuer: String): LenientUri = def authenticatorUrl(issuer: String): LenientUri =
LenientUri.unsafe( LenientUri.unsafe(
s"otpauth://totp/$issuer:${accountId.asString}?secret=${key.data.toBase32}&issuer=$issuer" s"otpauth://totp/$issuer:${accountId.asString}?secret=${key.data.toBase32}&issuer=$issuer"
@ -67,7 +68,7 @@ object OTotp {
case object NotFound extends InitResult case object NotFound extends InitResult
final case class Failed(ex: Throwable) extends InitResult final case class Failed(ex: Throwable) extends InitResult
def success(accountId: AccountId, key: Key): InitResult = def success(accountId: AccountInfo, key: Key): InitResult =
Success(accountId, key) Success(accountId, key)
def alreadyExists: InitResult = AlreadyExists def alreadyExists: InitResult = AlreadyExists
@ -85,19 +86,16 @@ object OTotp {
Resource.pure[F, OTotp[F]](new OTotp[F] { Resource.pure[F, OTotp[F]](new OTotp[F] {
val log = docspell.logging.getLogger[F] val log = docspell.logging.getLogger[F]
def initialize(accountId: AccountId): F[InitResult] = def initialize(accountId: AccountInfo): F[InitResult] =
for { for {
_ <- log.info(s"Initializing TOTP for account ${accountId.asString}") _ <- log.info(s"Initializing TOTP for account ${accountId.asString}")
userId <- store.transact(RUser.findIdByAccount(accountId)) result <- for {
result <- userId match { record <- RTotp.generate[F](accountId.userId, totp.settings.mac)
case Some(uid) =>
for {
record <- RTotp.generate[F](uid, totp.settings.mac)
un <- store.transact(RTotp.updateDisabled(record)) un <- store.transact(RTotp.updateDisabled(record))
an <- an <-
if (un != 0) if (un != 0)
AddResult.entityExists("Entity exists, but update was ok").pure[F] AddResult.entityExists("Entity exists, but update was ok").pure[F]
else store.add(RTotp.insert(record), RTotp.existsByLogin(accountId)) else store.add(RTotp.insert(record), RTotp.existsByUserId(accountId.userId))
innerResult <- innerResult <-
if (un != 0) InitResult.success(accountId, record.secret).pure[F] if (un != 0) InitResult.success(accountId, record.secret).pure[F]
else else
@ -116,16 +114,13 @@ object OTotp {
InitResult.success(accountId, record.secret).pure[F] InitResult.success(accountId, record.secret).pure[F]
} }
} yield innerResult } yield innerResult
case None =>
log.warn(s"No user found for account: ${accountId.asString}!") *>
InitResult.NotFound.pure[F]
}
} yield result } yield result
def confirmInit(accountId: AccountId, otp: OnetimePassword): F[ConfirmResult] = def confirmInit(accountId: AccountInfo, otp: OnetimePassword): F[ConfirmResult] =
for { for {
_ <- log.info(s"Confirm TOTP setup for account ${accountId.asString}") _ <- log.info(s"Confirm TOTP setup for account ${accountId.asString}")
key <- store.transact(RTotp.findEnabledByLogin(accountId, false)) key <- store.transact(RTotp.findEnabledByUserId(accountId.userId, false))
now <- Timestamp.current[F] now <- Timestamp.current[F]
res <- key match { res <- key match {
case None => case None =>
@ -134,7 +129,7 @@ object OTotp {
val check = totp.checkPassword(r.secret, otp, now.value) val check = totp.checkPassword(r.secret, otp, now.value)
if (check) if (check)
store store
.transact(RTotp.setEnabled(accountId, true)) .transact(RTotp.setEnabled(accountId.userId, true))
.map(_ => ConfirmResult.Success) .map(_ => ConfirmResult.Success)
else ConfirmResult.Failed.pure[F] else ConfirmResult.Failed.pure[F]
} }
@ -154,7 +149,7 @@ object OTotp {
val check = totp.checkPassword(r.secret, pw, now.value) val check = totp.checkPassword(r.secret, pw, now.value)
if (check) if (check)
UpdateResult.fromUpdate( UpdateResult.fromUpdate(
store.transact(RTotp.setEnabled(accountId, false)) store.transact(RTotp.setEnabled(r.userId, false))
) )
else else
log.info(s"TOTP code was invalid. Not disabling it.") *> UpdateResult log.info(s"TOTP code was invalid. Not disabling it.") *> UpdateResult
@ -163,12 +158,17 @@ object OTotp {
} }
} yield res } yield res
case None => case None =>
UpdateResult.fromUpdate(store.transact(RTotp.setEnabled(accountId, false))) UpdateResult.fromUpdate {
(for {
key <- OptionT(RTotp.findEnabledByLogin(accountId, true))
n <- OptionT.liftF(RTotp.setEnabled(key.userId, false))
} yield n).mapK(store.transform).getOrElse(0)
}
} }
def state(accountId: AccountId): F[OtpState] = def state(acc: AccountInfo): F[OtpState] =
for { for {
record <- store.transact(RTotp.findEnabledByLogin(accountId, true)) record <- store.transact(RTotp.findEnabledByUserId(acc.userId, true))
result = record match { result = record match {
case Some(r) => case Some(r) =>
OtpState.Enabled(r.created) OtpState.Enabled(r.created)
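
The TOTP operations above are now keyed by AccountInfo (and internally by the user id) rather than the login-based AccountId. A minimal sketch of the expected two-step setup flow, assuming an OTotp[IO] named totp, an AccountInfo named account, that the result types live in the OTotp companion, and that OnetimePassword simply wraps the code the user types (all assumptions, not shown in this diff):

    import cats.effect.IO

    // Sketch only: `totp`, `account` and the OnetimePassword constructor are assumptions.
    def setupTotp(totp: OTotp[IO], account: AccountInfo, userCode: String): IO[OTotp.ConfirmResult] =
      for {
        init <- totp.initialize(account)
        _ <- init match {
          case ok: OTotp.InitResult.Success =>
            // payload for the QR code presented to the user
            IO.println(ok.authenticatorUrl("docspell").asString)
          case other =>
            IO.println(s"TOTP init did not succeed: $other")
        }
        result <- totp.confirmInit(account, OnetimePassword(userCode))
      } yield result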

View File

@ -14,6 +14,7 @@ import fs2.Stream
import docspell.backend.JobFactory import docspell.backend.JobFactory
import docspell.common._ import docspell.common._
import docspell.scheduler.usertask.UserTaskScope
import docspell.scheduler.{Job, JobStore} import docspell.scheduler.{Job, JobStore}
import docspell.store.Store import docspell.store.Store
import docspell.store.records._ import docspell.store.records._
@ -22,7 +23,8 @@ trait OUpload[F[_]] {
def submit( def submit(
data: OUpload.UploadData[F], data: OUpload.UploadData[F],
account: AccountId, collectiveId: CollectiveId,
userId: Option[Ident],
itemId: Option[Ident] itemId: Option[Ident]
): F[OUpload.UploadResult] ): F[OUpload.UploadResult]
@ -38,12 +40,13 @@ trait OUpload[F[_]] {
final def submitEither( final def submitEither(
data: OUpload.UploadData[F], data: OUpload.UploadData[F],
accOrSrc: Either[Ident, AccountId], accOrSrc: Either[Ident, CollectiveId],
userId: Option[Ident],
itemId: Option[Ident] itemId: Option[Ident]
): F[OUpload.UploadResult] = ): F[OUpload.UploadResult] =
accOrSrc match { accOrSrc match {
case Right(acc) => case Right(acc) =>
submit(data, acc, itemId) submit(data, acc, userId, itemId)
case Left(srcId) => case Left(srcId) =>
submit(data, srcId, itemId) submit(data, srcId, itemId)
} }
@ -90,7 +93,7 @@ object OUpload {
def noFiles: UploadResult = NoFiles def noFiles: UploadResult = NoFiles
/** A source (`RSource') could not be found for a given source-id. */ /** A source (`RSource`) could not be found for a given source-id. */
case object NoSource extends UploadResult case object NoSource extends UploadResult
def noSource: UploadResult = NoSource def noSource: UploadResult = NoSource
@ -99,6 +102,11 @@ object OUpload {
case object NoItem extends UploadResult case object NoItem extends UploadResult
def noItem: UploadResult = NoItem def noItem: UploadResult = NoItem
/** A collective with the given id was not found */
case object NoCollective extends UploadResult
def noCollective: UploadResult = NoCollective
} }
private def right[F[_]: Functor, A](a: F[A]): EitherT[F, UploadResult, A] = private def right[F[_]: Functor, A](a: F[A]): EitherT[F, UploadResult, A] =
@ -110,26 +118,30 @@ object OUpload {
): Resource[F, OUpload[F]] = ): Resource[F, OUpload[F]] =
Resource.pure[F, OUpload[F]](new OUpload[F] { Resource.pure[F, OUpload[F]](new OUpload[F] {
private[this] val logger = docspell.logging.getLogger[F] private[this] val logger = docspell.logging.getLogger[F]
def submit( def submit(
data: OUpload.UploadData[F], data: OUpload.UploadData[F],
account: AccountId, collectiveId: CollectiveId,
userId: Option[Ident],
itemId: Option[Ident] itemId: Option[Ident]
): F[OUpload.UploadResult] = ): F[OUpload.UploadResult] =
(for { (for {
_ <- checkExistingItem(itemId, account.collective) _ <- checkExistingItem(itemId, collectiveId)
files <- right(data.files.traverse(saveFile(account)).map(_.flatten)) coll <- OptionT(store.transact(RCollective.findById(collectiveId)))
.toRight(UploadResult.noCollective)
files <- right(data.files.traverse(saveFile(coll.id)).map(_.flatten))
_ <- checkFileList(files) _ <- checkFileList(files)
lang <- data.meta.language match { lang <- data.meta.language match {
case Some(lang) => right(lang.pure[F]) case Some(lang) => right(lang.pure[F])
case None => case None =>
right( right(
store store
.transact(RCollective.findLanguage(account.collective)) .transact(RCollective.findLanguage(collectiveId))
.map(_.getOrElse(Language.German)) .map(_.getOrElse(Language.German))
) )
} }
meta = ProcessItemArgs.ProcessMeta( meta = ProcessItemArgs.ProcessMeta(
account.collective, collectiveId,
itemId, itemId,
lang, lang,
data.meta.direction, data.meta.direction,
@ -143,12 +155,18 @@ object OUpload {
data.meta.attachmentsOnly data.meta.attachmentsOnly
) )
args = ProcessItemArgs(meta, files.toList) args = ProcessItemArgs(meta, files.toList)
jobs <- right(makeJobs(data, args, account)) jobs <- right(
makeJobs(
data,
args,
UserTaskScope(collectiveId, userId)
)
)
_ <- right(logger.debug(s"Storing jobs: $jobs")) _ <- right(logger.debug(s"Storing jobs: $jobs"))
res <- right(submitJobs(jobs.map(_.encode))) res <- right(submitJobs(jobs.map(_.encode)))
_ <- right( _ <- right(
store.transact( store.transact(
RSource.incrementCounter(data.meta.sourceAbbrev, account.collective) RSource.incrementCounter(data.meta.sourceAbbrev, collectiveId)
) )
) )
} yield res).fold(identity, identity) } yield res).fold(identity, identity)
@ -174,8 +192,7 @@ object OUpload {
), ),
priority = src.source.priority priority = src.source.priority
) )
accId = AccountId(src.source.cid, src.source.sid) result <- OptionT.liftF(submit(updata, src.source.cid, None, itemId))
result <- OptionT.liftF(submit(updata, accId, itemId))
} yield result).getOrElse(UploadResult.noSource) } yield result).getOrElse(UploadResult.noSource)
private def submitJobs(jobs: List[Job[String]]): F[OUpload.UploadResult] = private def submitJobs(jobs: List[Job[String]]): F[OUpload.UploadResult] =
@ -186,13 +203,13 @@ object OUpload {
/** Saves the file into the database. */ /** Saves the file into the database. */
private def saveFile( private def saveFile(
accountId: AccountId collectiveId: CollectiveId
)(file: File[F]): F[Option[ProcessItemArgs.File]] = )(file: File[F]): F[Option[ProcessItemArgs.File]] =
logger.info(s"Receiving file $file") *> logger.info(s"Receiving file $file") *>
file.data file.data
.through( .through(
store.fileRepo.save( store.fileRepo.save(
accountId.collective, collectiveId,
FileCategory.AttachmentSource, FileCategory.AttachmentSource,
MimeTypeHint(file.name, None) MimeTypeHint(file.name, None)
) )
@ -212,7 +229,7 @@ object OUpload {
private def checkExistingItem( private def checkExistingItem(
itemId: Option[Ident], itemId: Option[Ident],
coll: Ident coll: CollectiveId
): EitherT[F, UploadResult, Unit] = ): EitherT[F, UploadResult, Unit] =
itemId match { itemId match {
case None => case None =>
@ -232,22 +249,22 @@ object OUpload {
private def makeJobs( private def makeJobs(
data: UploadData[F], data: UploadData[F],
args: ProcessItemArgs, args: ProcessItemArgs,
account: AccountId submitter: UserTaskScope
): F[List[Job[ProcessItemArgs]]] = ): F[List[Job[ProcessItemArgs]]] =
if (data.meta.flattenArchives.getOrElse(false)) if (data.meta.flattenArchives.getOrElse(false))
JobFactory JobFactory
.multiUpload(args, account, data.priority, data.tracker) .multiUpload(args, submitter, data.priority, data.tracker)
.map(List(_)) .map(List(_))
else if (data.multiple) else if (data.multiple)
JobFactory.processItems( JobFactory.processItems(
args.files.map(f => args.copy(files = List(f))), args.files.map(f => args.copy(files = List(f))),
account, submitter,
data.priority, data.priority,
data.tracker data.tracker
) )
else else
JobFactory JobFactory
.processItem[F](args, account, data.priority, data.tracker) .processItem[F](args, submitter, data.priority, data.tracker)
.map(List(_)) .map(List(_))
}) })
} }
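
Since submit and submitEither now take the collective id plus an optional user id instead of an AccountId, callers pass both pieces explicitly. A small sketch of the two call sites, assuming an existing OUpload[F] named upload and a prepared OUpload.UploadData[F] named data (both assumptions, not shown in this diff):

    // Authenticated upload: collective and user come from the session.
    def uploadForUser[F[_]](
        upload: OUpload[F],
        data: OUpload.UploadData[F],
        cid: CollectiveId,
        userId: Ident
    ): F[OUpload.UploadResult] =
      upload.submitEither(data, Right(cid), Some(userId), None)

    // Anonymous upload via a source url: only the source id is known;
    // collective and priority are resolved from RSource inside OUpload.
    def uploadForSource[F[_]](
        upload: OUpload[F],
        data: OUpload.UploadData[F],
        sourceId: Ident
    ): F[OUpload.UploadResult] =
      upload.submitEither(data, Left(sourceId), None, None)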

View File

@ -79,7 +79,7 @@ trait OSearch[F[_]] {
* `q.fix` part. * `q.fix` part.
*/ */
def parseQueryString( def parseQueryString(
accountId: AccountId, accountId: AccountInfo,
mode: SearchMode, mode: SearchMode,
qs: String qs: String
): QueryParseResult ): QueryParseResult
@ -94,7 +94,7 @@ object OSearch {
private[this] val logger = docspell.logging.getLogger[F] private[this] val logger = docspell.logging.getLogger[F]
def parseQueryString( def parseQueryString(
accountId: AccountId, accountId: AccountInfo,
mode: SearchMode, mode: SearchMode,
qs: String qs: String
): QueryParseResult = { ): QueryParseResult = {
@ -199,7 +199,7 @@ object OSearch {
timed <- Duration.stopTime[F] timed <- Duration.stopTime[F]
resolved <- store resolved <- store
.transact( .transact(
QItem.findItemsWithTags(q.fix.account.collective, Stream.emits(items)) QItem.findItemsWithTags(q.fix.account.collectiveId, Stream.emits(items))
) )
.compile .compile
.toVector .toVector
@ -233,13 +233,13 @@ object OSearch {
} }
private def createFtsQuery( private def createFtsQuery(
account: AccountId, account: AccountInfo,
ftq: String ftq: String
): F[FtsQuery] = ): F[FtsQuery] =
store store
.transact(QFolder.getMemberFolders(account)) .transact(QFolder.getMemberFolders(account.collectiveId, account.userId))
.map(folders => .map(folders =>
FtsQuery(ftq, account.collective, 500, 0) FtsQuery(ftq, account.collectiveId, 500, 0)
.withFolders(folders) .withFolders(folders)
) )

View File

@ -6,6 +6,7 @@
package docspell.backend.signup package docspell.backend.signup
import cats.data.OptionT
import cats.effect.{Async, Resource} import cats.effect.{Async, Resource}
import cats.implicits._ import cats.implicits._
@ -48,7 +49,7 @@ object OSignup {
def register(cfg: Config)(data: RegisterData): F[SignupResult] = def register(cfg: Config)(data: RegisterData): F[SignupResult] =
cfg.mode match { cfg.mode match {
case Config.Mode.Open => case Config.Mode.Open =>
addUser(data).map(SignupResult.fromAddResult) addNewAccount(data, AccountSource.Local).map(SignupResult.fromAddResult)
case Config.Mode.Closed => case Config.Mode.Closed =>
SignupResult.signupClosed.pure[F] SignupResult.signupClosed.pure[F]
@ -61,7 +62,9 @@ object OSignup {
min = now.minus(cfg.inviteTime) min = now.minus(cfg.inviteTime)
ok <- store.transact(RInvitation.useInvite(inv, min)) ok <- store.transact(RInvitation.useInvite(inv, min))
res <- res <-
if (ok) addUser(data).map(SignupResult.fromAddResult) if (ok)
addNewAccount(data, AccountSource.Local)
.map(SignupResult.fromAddResult)
else SignupResult.invalidInvitationKey.pure[F] else SignupResult.invalidInvitationKey.pure[F]
_ <- _ <-
if (retryInvite(res)) if (retryInvite(res))
@ -84,26 +87,37 @@ object OSignup {
SignupResult SignupResult
.failure(new Exception("Account source must not be LOCAL!")) .failure(new Exception("Account source must not be LOCAL!"))
.pure[F] .pure[F]
else else {
val maybeInsert: ConnectionIO[Unit] =
for { for {
recs <- makeRecords(data.collName, data.login, Password(""), data.source) now <- Timestamp.current[ConnectionIO]
cres <- store.add( cid <- OptionT(RCollective.findByName(data.collName))
RCollective.insert(recs._1), .map(_.id)
RCollective.existsById(data.collName) .getOrElseF(
RCollective.insert(RCollective.makeDefault(data.collName, now))
) )
ures <- store.add(RUser.insert(recs._2), RUser.exists(data.login))
res = cres match { uid <- Ident.randomId[ConnectionIO]
case AddResult.Failure(ex) => newUser = RUser.makeDefault(
uid,
data.login,
cid,
Password(""),
AccountSource.OpenId,
now
)
_ <- OptionT(RUser.findByLogin(data.login, cid.some))
.map(_ => 1)
.getOrElseF(RUser.insert(newUser))
} yield ()
store.transact(maybeInsert).attempt.map {
case Left(ex) =>
SignupResult.failure(ex) SignupResult.failure(ex)
case _ => case Right(_) =>
ures match {
case AddResult.Failure(ex) =>
SignupResult.failure(ex)
case _ =>
SignupResult.success SignupResult.success
} }
} }
} yield res
private def retryInvite(res: SignupResult): Boolean = private def retryInvite(res: SignupResult): Boolean =
res match { res match {
@ -119,41 +133,38 @@ object OSignup {
false false
} }
private def addUser(data: RegisterData): F[AddResult] = { private def addNewAccount(
def insert(coll: RCollective, user: RUser): ConnectionIO[Int] = data: RegisterData,
accountSource: AccountSource
): F[AddResult] = {
def insert: ConnectionIO[Int] =
for { for {
n1 <- RCollective.insert(coll) now <- Timestamp.current[ConnectionIO]
n2 <- RUser.insert(user) cid <- RCollective.insert(RCollective.makeDefault(data.collName, now))
} yield n1 + n2 uid <- Ident.randomId[ConnectionIO]
n2 <- RUser.insert(
RUser.makeDefault(
uid,
data.login,
cid,
if (data.password.isEmpty) data.password
else PasswordCrypt.crypt(data.password),
accountSource,
now
)
)
} yield n2
def collectiveExists: ConnectionIO[Boolean] = def collectiveExists: ConnectionIO[Boolean] =
RCollective.existsById(data.collName) RCollective.existsByName(data.collName)
val msg = s"The collective '${data.collName}' already exists." val msg = s"The collective '${data.collName}' already exists."
for { for {
cu <- makeRecords(data.collName, data.login, data.password, AccountSource.Local) exists <- store.transact(collectiveExists)
save <- store.add(insert(cu._1, cu._2), collectiveExists) saved <-
} yield save.fold(identity, _.withMsg(msg), identity) if (exists) AddResult.entityExists(msg).pure[F]
else store.transact(insert).attempt.map(AddResult.fromUpdate)
} yield saved
} }
private def makeRecords(
collName: Ident,
login: Ident,
password: Password,
source: AccountSource
): F[(RCollective, RUser)] =
for {
id2 <- Ident.randomId[F]
now <- Timestamp.current[F]
c = RCollective.makeDefault(collName, now)
u = RUser.makeDefault(
id2,
login,
collName,
PasswordCrypt.crypt(password),
source,
now
)
} yield (c, u)
}) })
} }

View File

@ -12,7 +12,8 @@ import docspell.common._
import io.circe.generic.semiauto.{deriveDecoder, deriveEncoder} import io.circe.generic.semiauto.{deriveDecoder, deriveEncoder}
import io.circe.{Decoder, Encoder} import io.circe.{Decoder, Encoder}
final case class DownloadZipArgs(accountId: AccountId, req: DownloadRequest) final case class DownloadZipArgs(account: AccountInfo, req: DownloadRequest)
extends TaskArguments
object DownloadZipArgs { object DownloadZipArgs {
val taskName: Ident = Ident.unsafe("download-query-zip") val taskName: Ident = Ident.unsafe("download-query-zip")

View File

@ -16,8 +16,18 @@ import scodec.bits.ByteVector
class AuthTokenTest extends CatsEffectSuite { class AuthTokenTest extends CatsEffectSuite {
val user = AccountId(Ident.unsafe("demo"), Ident.unsafe("demo")) val user = AccountInfo(
val john = AccountId(Ident.unsafe("demo"), Ident.unsafe("john")) CollectiveId(1),
Ident.unsafe("demo"),
Ident.unsafe("abc-def"),
Ident.unsafe("demo")
)
val john = AccountInfo(
CollectiveId(1),
Ident.unsafe("demo"),
Ident.unsafe("abc-hij"),
Ident.unsafe("john")
)
val secret = ByteVector.fromValidHex("caffee") val secret = ByteVector.fromValidHex("caffee")
val otherSecret = ByteVector.fromValidHex("16bad") val otherSecret = ByteVector.fromValidHex("16bad")

View File

@ -0,0 +1,48 @@
/*
* Copyright 2020 Eike K. & Contributors
*
* SPDX-License-Identifier: AGPL-3.0-or-later
*/
package docspell.common
import io.circe.generic.semiauto.{deriveDecoder, deriveEncoder}
import io.circe.{Decoder, Encoder}
final case class AccountInfo(
collectiveId: CollectiveId,
collective: Ident,
userId: Ident,
login: Ident
) {
def asAccountId: AccountId =
AccountId(collective, login)
def asString: String =
s"${collectiveId.value}/${collective.id}/${userId.id}/${login.id}"
}
object AccountInfo {
implicit val jsonDecoder: Decoder[AccountInfo] = deriveDecoder
implicit val jsonEncoder: Encoder[AccountInfo] = deriveEncoder
def parse(str: String): Either[String, AccountInfo] = {
val input = str.replaceAll("\\s+", "").trim
val invalid: Either[String, AccountInfo] =
Left(s"Cannot parse account info: $str")
input.split('/').toList match {
case collId :: collName :: userId :: login :: Nil =>
for {
cid <- collId.toLongOption.toRight(s"Invalid collective id: $collId")
cn <- Ident.fromString(collName)
uid <- Ident.fromString(userId)
un <- Ident.fromString(login)
} yield AccountInfo(CollectiveId(cid), cn, uid, un)
case _ =>
invalid
}
}
}
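
The new AccountInfo carries both the numeric collective id and the human-readable names, and round-trips through the four-part string produced by asString. A small usage sketch with made-up values:

    // Values are illustrative only.
    val info = AccountInfo(
      CollectiveId(42),
      Ident.unsafe("acme"),
      Ident.unsafe("user-123"),
      Ident.unsafe("alice")
    )

    info.asString                    // "42/acme/user-123/alice"
    AccountInfo.parse(info.asString) // Right(info)
    AccountInfo.parse("garbage")     // Left("Cannot parse account info: garbage")
    info.asAccountId                 // legacy two-part AccountId(acme, alice)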

View File

@ -16,9 +16,9 @@ import io.circe.{Decoder, Encoder}
* collective is specified, it considers all attachments. * collective is specified, it considers all attachments.
*/ */
case class AllPreviewsArgs( case class AllPreviewsArgs(
collective: Option[Ident], collective: Option[CollectiveId],
storeMode: MakePreviewArgs.StoreMode storeMode: MakePreviewArgs.StoreMode
) ) extends TaskArguments
object AllPreviewsArgs { object AllPreviewsArgs {

View File

@ -0,0 +1,38 @@
/*
* Copyright 2020 Eike K. & Contributors
*
* SPDX-License-Identifier: AGPL-3.0-or-later
*/
package docspell.common
import io.circe.{Decoder, Encoder}
final class CollectiveId(val value: Long) extends AnyVal {
def valueAsString: String =
value.toString
def valueAsIdent: Ident =
Ident.unsafe(valueAsString)
override def toString =
s"CollectiveId($value)"
}
object CollectiveId {
val unknown: CollectiveId = CollectiveId(-1)
def apply(n: Long): CollectiveId = new CollectiveId(n)
def fromString(str: String): Either[String, CollectiveId] =
str.trim.toLongOption.map(CollectiveId(_)).toRight(s"Invalid collective id: $str")
def unsafeFromString(str: String): CollectiveId =
fromString(str).fold(sys.error, identity)
implicit val jsonEncoder: Encoder[CollectiveId] =
Encoder.encodeLong.contramap(_.value)
implicit val jsonDecoder: Decoder[CollectiveId] =
Decoder.decodeLong.map(CollectiveId.apply)
}
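
CollectiveId is a value class over the numeric primary key of the collective table and replaces the name-based Ident in most signatures of this commit. A short sketch of the helpers it offers (values illustrative):

    import io.circe.syntax._

    val cid = CollectiveId(42)
    cid.valueAsString                  // "42"
    cid.valueAsIdent                   // Ident.unsafe("42"), for APIs still keyed by Ident
    CollectiveId.fromString(" 42 ")    // Right(CollectiveId(42)), input is trimmed
    CollectiveId.fromString("abc")     // Left("Invalid collective id: abc")
    cid.asJson.noSpaces                // "42", encoded as a plain JSON number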

View File

@ -16,7 +16,7 @@ import io.circe.generic.semiauto._
* submitted by this task run in the realm of the collective (and only their files are * submitted by this task run in the realm of the collective (and only their files are
* considered). If it is empty, it is a system task and all files are considered. * considered). If it is empty, it is a system task and all files are considered.
*/ */
case class ConvertAllPdfArgs(collective: Option[Ident]) case class ConvertAllPdfArgs(collective: Option[CollectiveId]) extends TaskArguments
object ConvertAllPdfArgs { object ConvertAllPdfArgs {

View File

@ -18,9 +18,9 @@ import io.circe.generic.semiauto._
* with state `ItemState.Deleted`. * with state `ItemState.Deleted`.
*/ */
case class EmptyTrashArgs( case class EmptyTrashArgs(
collective: Ident, collective: CollectiveId,
minAge: Duration minAge: Duration
) { ) extends TaskArguments {
def makeSubject: String = def makeSubject: String =
s"Empty Trash: Remove older than ${minAge.toJava}" s"Empty Trash: Remove older than ${minAge.toJava}"
@ -35,8 +35,8 @@ object EmptyTrashArgs {
val defaultSchedule = CalEvent.unsafe("*-*-1/7 03:00:00 UTC") val defaultSchedule = CalEvent.unsafe("*-*-1/7 03:00:00 UTC")
def periodicTaskId(coll: Ident): Ident = def periodicTaskId(coll: CollectiveId): Ident =
Ident.unsafe(s"docspell") / taskName / coll Ident.unsafe(s"docspell") / taskName / coll.value
implicit val jsonEncoder: Encoder[EmptyTrashArgs] = implicit val jsonEncoder: Encoder[EmptyTrashArgs] =
deriveEncoder[EmptyTrashArgs] deriveEncoder[EmptyTrashArgs]
@ -45,5 +45,4 @@ object EmptyTrashArgs {
def parse(str: String): Either[Throwable, EmptyTrashArgs] = def parse(str: String): Either[Throwable, EmptyTrashArgs] =
str.parseJsonAs[EmptyTrashArgs] str.parseJsonAs[EmptyTrashArgs]
} }
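
EmptyTrashArgs is now keyed by the numeric collective id and derives the periodic task id from it via the new Ident./(Number) operator. A small sketch with illustrative values, assuming docspell.common.Duration offers a days constructor (an assumption here):

    // Illustrative values; Duration.days is assumed to exist.
    val args = EmptyTrashArgs(CollectiveId(42), Duration.days(30))

    args.makeSubject                                // subject line for the submitted job
    EmptyTrashArgs.periodicTaskId(args.collective)  // stable Ident built from "docspell", the task name and 42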

View File

@ -20,7 +20,7 @@ sealed trait FileCategory { self: Product =>
final def id: Ident = final def id: Ident =
Ident.unsafe(self.productPrefix.toLowerCase) Ident.unsafe(self.productPrefix.toLowerCase)
def toFileKey(collective: Ident, fileId: Ident): FileKey = def toFileKey(collective: CollectiveId, fileId: Ident): FileKey =
common.FileKey(collective, this, fileId) common.FileKey(collective, this, fileId)
} }

View File

@ -14,7 +14,7 @@ import io.circe.generic.semiauto.{deriveDecoder, deriveEncoder}
import io.circe.syntax._ import io.circe.syntax._
import io.circe.{Decoder, Encoder} import io.circe.{Decoder, Encoder}
/** This is the input to the `FileCopyTask`. The task copies all files from on /** This is the input to the `FileCopyTask`. The task copies all files from one
* FileRepository to one or more target repositories. * FileRepository to one or more target repositories.
* *
* If no `from` is given, the default file repository is used. For targets, a list of ids * If no `from` is given, the default file repository is used. For targets, a list of ids
@ -22,6 +22,7 @@ import io.circe.{Decoder, Encoder}
* selecting "all", it means all enabled stores. * selecting "all", it means all enabled stores.
*/ */
final case class FileCopyTaskArgs(from: Option[Ident], to: Selection) final case class FileCopyTaskArgs(from: Option[Ident], to: Selection)
extends TaskArguments
object FileCopyTaskArgs { object FileCopyTaskArgs {
val taskName = Ident.unsafe("copy-file-repositories") val taskName = Ident.unsafe("copy-file-repositories")

View File

@ -9,7 +9,7 @@ package docspell.common
import io.circe.generic.semiauto.{deriveDecoder, deriveEncoder} import io.circe.generic.semiauto.{deriveDecoder, deriveEncoder}
import io.circe.{Decoder, Encoder} import io.circe.{Decoder, Encoder}
final case class FileIntegrityCheckArgs(pattern: FileKeyPart) {} final case class FileIntegrityCheckArgs(pattern: FileKeyPart) extends TaskArguments
object FileIntegrityCheckArgs { object FileIntegrityCheckArgs {
val taskName: Ident = Ident.unsafe("all-file-integrity-check") val taskName: Ident = Ident.unsafe("all-file-integrity-check")

View File

@ -9,9 +9,9 @@ package docspell.common
import io.circe.generic.semiauto.{deriveDecoder, deriveEncoder} import io.circe.generic.semiauto.{deriveDecoder, deriveEncoder}
import io.circe.{Decoder, Encoder} import io.circe.{Decoder, Encoder}
final case class FileKey(collective: Ident, category: FileCategory, id: Ident) { final case class FileKey(collective: CollectiveId, category: FileCategory, id: Ident) {
override def toString = override def toString =
s"${collective.id}/${category.id.id}/${id.id}" s"${collective.value}/${category.id.id}/${id.id}"
} }
object FileKey { object FileKey {
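
File keys are now namespaced by the numeric collective id, which also changes the rendered key string. A tiny example with made-up values:

    // Illustrative values only.
    val key = FileKey(CollectiveId(7), FileCategory.AttachmentSource, Ident.unsafe("f-abc123"))
    key.toString // "7/attachmentsource/f-abc123"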

View File

@ -17,9 +17,10 @@ object FileKeyPart {
case object Empty extends FileKeyPart case object Empty extends FileKeyPart
final case class Collective(collective: Ident) extends FileKeyPart final case class Collective(collective: CollectiveId) extends FileKeyPart
final case class Category(collective: Ident, category: FileCategory) extends FileKeyPart final case class Category(collective: CollectiveId, category: FileCategory)
extends FileKeyPart
final case class Key(key: FileKey) extends FileKeyPart final case class Key(key: FileKey) extends FileKeyPart
@ -37,7 +38,7 @@ object FileKeyPart {
implicit val jsonDecoder: Decoder[FileKeyPart] = implicit val jsonDecoder: Decoder[FileKeyPart] =
Decoder.instance { cursor => Decoder.instance { cursor =>
for { for {
cid <- cursor.getOrElse[Option[Ident]]("collective")(None) cid <- cursor.getOrElse[Option[CollectiveId]]("collective")(None)
cat <- cursor.getOrElse[Option[FileCategory]]("category")(None) cat <- cursor.getOrElse[Option[FileCategory]]("category")(None)
emptyObj = cursor.keys.exists(_.isEmpty) emptyObj = cursor.keys.exists(_.isEmpty)

View File

@ -27,6 +27,9 @@ case class Ident(id: String) {
def /(next: Ident): Ident = def /(next: Ident): Ident =
new Ident(id + Ident.concatChar + next.id) new Ident(id + Ident.concatChar + next.id)
def /(next: Number): Ident =
new Ident(id + Ident.concatChar + next)
def take(n: Int): Ident = def take(n: Int): Ident =
new Ident(id.take(n)) new Ident(id.take(n))
} }

View File

@ -15,10 +15,10 @@ import io.circe.{Decoder, Encoder}
* tasks that are configured for 'existing-item' are run. * tasks that are configured for 'existing-item' are run.
*/ */
final case class ItemAddonTaskArgs( final case class ItemAddonTaskArgs(
collective: Ident, collective: CollectiveId,
itemId: Ident, itemId: Ident,
addonRunConfigs: Set[Ident] addonRunConfigs: Set[Ident]
) ) extends TaskArguments
object ItemAddonTaskArgs { object ItemAddonTaskArgs {
val taskName: Ident = Ident.unsafe("addon-existing-item") val taskName: Ident = Ident.unsafe("addon-existing-item")

View File

@ -18,12 +18,11 @@ import io.circe.generic.semiauto._
* possible tags. * possible tags.
*/ */
case class LearnClassifierArgs( case class LearnClassifierArgs(
collective: Ident collectiveId: CollectiveId
) { ) extends TaskArguments {
def makeSubject: String = def makeSubject: String =
"Learn tags" "Learn tags"
} }
object LearnClassifierArgs { object LearnClassifierArgs {
@ -37,5 +36,4 @@ object LearnClassifierArgs {
def parse(str: String): Either[Throwable, LearnClassifierArgs] = def parse(str: String): Either[Throwable, LearnClassifierArgs] =
str.parseJsonAs[LearnClassifierArgs] str.parseJsonAs[LearnClassifierArgs]
} }

View File

@ -14,7 +14,7 @@ import io.circe.{Decoder, Encoder}
*/ */
case class MakePageCountArgs( case class MakePageCountArgs(
attachment: Ident attachment: Ident
) ) extends TaskArguments
object MakePageCountArgs { object MakePageCountArgs {

View File

@ -16,7 +16,7 @@ import io.circe.{Decoder, Encoder}
case class MakePreviewArgs( case class MakePreviewArgs(
attachment: Ident, attachment: Ident,
store: MakePreviewArgs.StoreMode store: MakePreviewArgs.StoreMode
) ) extends TaskArguments
object MakePreviewArgs { object MakePreviewArgs {

View File

@ -17,12 +17,12 @@ import io.circe.generic.semiauto._
* This task is run for each new file to create a new item from it or to add this file as * This task is run for each new file to create a new item from it or to add this file as
* an attachment to an existing item. * an attachment to an existing item.
* *
* If the `itemId' is set to some value, the item is tried to load to amend with the * If the `itemId` is set to some value, the item is tried to load to amend with the
* given files. Otherwise a new item is created. * given files. Otherwise a new item is created.
* *
* It is also re-used by the 'ReProcessItem' task. * It is also re-used by the 'ReProcessItem' task.
*/ */
case class ProcessItemArgs(meta: ProcessMeta, files: List[File]) { case class ProcessItemArgs(meta: ProcessMeta, files: List[File]) extends TaskArguments {
def makeSubject: String = def makeSubject: String =
files.flatMap(_.name) match { files.flatMap(_.name) match {
@ -43,7 +43,7 @@ object ProcessItemArgs {
val multiUploadTaskName = Ident.unsafe("multi-upload-process") val multiUploadTaskName = Ident.unsafe("multi-upload-process")
case class ProcessMeta( case class ProcessMeta(
collective: Ident, collective: CollectiveId,
itemId: Option[Ident], itemId: Option[Ident],
language: Language, language: Language,
direction: Option[Direction], direction: Option[Direction],
@ -73,5 +73,4 @@ object ProcessItemArgs {
def parse(str: String): Either[Throwable, ProcessItemArgs] = def parse(str: String): Either[Throwable, ProcessItemArgs] =
str.parseJsonAs[ProcessItemArgs] str.parseJsonAs[ProcessItemArgs]
} }

View File

@ -9,7 +9,7 @@ package docspell.common
import io.circe._ import io.circe._
import io.circe.generic.semiauto._ import io.circe.generic.semiauto._
final case class ReIndexTaskArgs(collective: Option[Ident]) final case class ReIndexTaskArgs(collective: Option[CollectiveId]) extends TaskArguments
object ReIndexTaskArgs { object ReIndexTaskArgs {
val taskName = Ident.unsafe("full-text-reindex") val taskName = Ident.unsafe("full-text-reindex")
@ -17,7 +17,7 @@ object ReIndexTaskArgs {
def tracker(args: ReIndexTaskArgs): Ident = def tracker(args: ReIndexTaskArgs): Ident =
args.collective match { args.collective match {
case Some(cid) => case Some(cid) =>
cid / DocspellSystem.migrationTaskTracker cid.valueAsIdent / DocspellSystem.migrationTaskTracker
case None => case None =>
DocspellSystem.migrationTaskTracker DocspellSystem.migrationTaskTracker
} }

View File

@ -16,6 +16,7 @@ import io.circe.{Decoder, Encoder}
* list is empty, then all attachments are re-processed. * list is empty, then all attachments are re-processed.
*/ */
case class ReProcessItemArgs(itemId: Ident, attachments: List[Ident]) case class ReProcessItemArgs(itemId: Ident, attachments: List[Ident])
extends TaskArguments
object ReProcessItemArgs { object ReProcessItemArgs {

View File

@ -20,7 +20,7 @@ import io.circe.generic.semiauto._
*/ */
case class ScanMailboxArgs( case class ScanMailboxArgs(
// the docspell user account // the docspell user account
account: AccountId, account: AccountInfo,
// the configured imap connection // the configured imap connection
imapConnection: Ident, imapConnection: Ident,
// scan folders recursively // scan folders recursively
@ -49,7 +49,7 @@ case class ScanMailboxArgs(
postHandleAll: Option[Boolean], postHandleAll: Option[Boolean],
// Exclude the mail body when importing // Exclude the mail body when importing
attachmentsOnly: Option[Boolean] attachmentsOnly: Option[Boolean]
) ) extends TaskArguments
object ScanMailboxArgs { object ScanMailboxArgs {

View File

@ -9,7 +9,8 @@ package docspell.common
import io.circe.generic.semiauto.{deriveDecoder, deriveEncoder} import io.circe.generic.semiauto.{deriveDecoder, deriveEncoder}
import io.circe.{Decoder, Encoder} import io.circe.{Decoder, Encoder}
final case class ScheduledAddonTaskArgs(collective: Ident, addonTaskId: Ident) final case class ScheduledAddonTaskArgs(collective: CollectiveId, addonTaskId: Ident)
extends TaskArguments
object ScheduledAddonTaskArgs { object ScheduledAddonTaskArgs {
val taskName: Ident = Ident.unsafe("addon-scheduled-task") val taskName: Ident = Ident.unsafe("addon-scheduled-task")

View File

@ -0,0 +1,14 @@
/*
* Copyright 2020 Eike K. & Contributors
*
* SPDX-License-Identifier: AGPL-3.0-or-later
*/
package docspell.common
/** A marker trait for task arguments.
*
* Arguments for tasks are stored as a JSON blob in the database. Changes in structure
* require a corresponding database migration.
*/
trait TaskArguments
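
The task argument classes touched by this commit all extend this marker trait and keep their circe codecs next to them, since the arguments are persisted as JSON. A hypothetical example following that pattern; the class name, fields and task name below are invented for illustration and are not part of the commit:

    import io.circe.generic.semiauto.{deriveDecoder, deriveEncoder}
    import io.circe.{Decoder, Encoder}

    // Hypothetical task arguments, persisted as a JSON blob with the job.
    final case class CleanupThumbnailsArgs(collective: CollectiveId, olderThan: Duration)
        extends TaskArguments

    object CleanupThumbnailsArgs {
      val taskName: Ident = Ident.unsafe("cleanup-thumbnails")

      implicit val jsonEncoder: Encoder[CleanupThumbnailsArgs] = deriveEncoder
      implicit val jsonDecoder: Decoder[CleanupThumbnailsArgs] = deriveDecoder
    }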

View File

@ -6,12 +6,12 @@
package docspell.common.bc package docspell.common.bc
import docspell.common.Ident import docspell.common.CollectiveId
trait BackendCommandRunner[F[_], A] { trait BackendCommandRunner[F[_], A] {
def run(collective: Ident, cmd: BackendCommand): F[A] def run(collective: CollectiveId, cmd: BackendCommand): F[A]
def runAll(collective: Ident, cmds: List[BackendCommand]): F[A] def runAll(collective: CollectiveId, cmds: List[BackendCommand]): F[A]
} }

View File

@ -15,9 +15,7 @@ trait StringSyntax {
Option(s).filter(_.trim.nonEmpty) Option(s).filter(_.trim.nonEmpty)
def parseJsonAs[A](implicit d: Decoder[A]): Either[Throwable, A] = def parseJsonAs[A](implicit d: Decoder[A]): Either[Throwable, A] =
for { parser.decode[A](s)
json <- parser.decode[A](s)
} yield json
} }
} }
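
parseJsonAs now delegates directly to circe's parser.decode instead of wrapping it in a one-step for-comprehension. A quick sketch, assuming the StringSyntax implicits are in scope and using an invented payload type:

    import io.circe.Decoder
    import io.circe.generic.semiauto.deriveDecoder

    // Hypothetical payload type for illustration.
    final case class Ping(msg: String)
    implicit val pingDecoder: Decoder[Ping] = deriveDecoder

    """{"msg":"hello"}""".parseJsonAs[Ping] // Right(Ping(hello))
    "not json".parseJsonAs[Ping]            // Left(io.circe.ParsingFailure(...))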

View File

@ -67,7 +67,7 @@ trait FtsClient[F[_]] {
def updateItemName( def updateItemName(
logger: Logger[F], logger: Logger[F],
itemId: Ident, itemId: Ident,
collective: Ident, collective: CollectiveId,
language: Language, language: Language,
name: String name: String
): F[Unit] = ): F[Unit] =
@ -79,7 +79,7 @@ trait FtsClient[F[_]] {
def updateItemNotes( def updateItemNotes(
logger: Logger[F], logger: Logger[F],
itemId: Ident, itemId: Ident,
collective: Ident, collective: CollectiveId,
language: Language, language: Language,
notes: Option[String] notes: Option[String]
): F[Unit] = ): F[Unit] =
@ -92,7 +92,7 @@ trait FtsClient[F[_]] {
logger: Logger[F], logger: Logger[F],
itemId: Ident, itemId: Ident,
attachId: Ident, attachId: Ident,
collective: Ident, collective: CollectiveId,
language: Language, language: Language,
name: Option[String] name: Option[String]
): F[Unit] = ): F[Unit] =
@ -112,7 +112,7 @@ trait FtsClient[F[_]] {
def updateFolder( def updateFolder(
logger: Logger[F], logger: Logger[F],
itemId: Ident, itemId: Ident,
collective: Ident, collective: CollectiveId,
folder: Option[Ident] folder: Option[Ident]
): F[Unit] ): F[Unit]
@ -124,7 +124,7 @@ trait FtsClient[F[_]] {
def clearAll(logger: Logger[F]): F[Unit] def clearAll(logger: Logger[F]): F[Unit]
/** Clears the index from all data belonging to the given collective. */ /** Clears the index from all data belonging to the given collective. */
def clear(logger: Logger[F], collective: Ident): F[Unit] def clear(logger: Logger[F], collective: CollectiveId): F[Unit]
} }
@ -149,7 +149,7 @@ object FtsClient {
def updateFolder( def updateFolder(
logger: Logger[F], logger: Logger[F],
itemId: Ident, itemId: Ident,
collective: Ident, collective: CollectiveId,
folder: Option[Ident] folder: Option[Ident]
): F[Unit] = ): F[Unit] =
logger.warn("Full-text search is disabled!") logger.warn("Full-text search is disabled!")
@ -166,7 +166,7 @@ object FtsClient {
def clearAll(logger: Logger[F]): F[Unit] = def clearAll(logger: Logger[F]): F[Unit] =
logger.warn("Full-text search is disabled!") logger.warn("Full-text search is disabled!")
def clear(logger: Logger[F], collective: Ident): F[Unit] = def clear(logger: Logger[F], collective: CollectiveId): F[Unit] =
logger.warn("Full-text search is disabled!") logger.warn("Full-text search is disabled!")
} }
} }

View File

@ -21,7 +21,7 @@ import docspell.common._
*/ */
final case class FtsQuery( final case class FtsQuery(
q: String, q: String,
collective: Ident, collective: CollectiveId,
items: Set[Ident], items: Set[Ident],
folders: Set[Ident], folders: Set[Ident],
limit: Int, limit: Int,
@ -37,7 +37,7 @@ final case class FtsQuery(
} }
object FtsQuery { object FtsQuery {
def apply(q: String, collective: Ident, limit: Int, offset: Int): FtsQuery = def apply(q: String, collective: CollectiveId, limit: Int, offset: Int): FtsQuery =
FtsQuery(q, collective, Set.empty, Set.empty, limit, offset, HighlightSetting.default) FtsQuery(q, collective, Set.empty, Set.empty, limit, offset, HighlightSetting.default)
case class HighlightSetting(pre: String, post: String) case class HighlightSetting(pre: String, post: String)
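
A full-text query is now scoped to a CollectiveId, with folders restricting visibility further. A construction sketch mirroring the call in OSearch.createFtsQuery, with illustrative values (withFolders is assumed to take the folder id set, as its use there suggests):

    // Illustrative values only.
    val query = FtsQuery("invoice 2022", CollectiveId(42), limit = 500, offset = 0)
      .withFolders(Set(Ident.unsafe("folder-1"), Ident.unsafe("folder-2")))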

View File

@ -29,7 +29,7 @@ object FtsResult {
case class ItemMatch( case class ItemMatch(
id: Ident, id: Ident,
itemId: Ident, itemId: Ident,
collectiveId: Ident, collectiveId: CollectiveId,
score: Double, score: Double,
data: MatchData data: MatchData
) )

View File

@ -14,7 +14,7 @@ sealed trait TextData {
def item: Ident def item: Ident
def collective: Ident def collective: CollectiveId
def folder: Option[Ident] def folder: Option[Ident]
@ -32,7 +32,7 @@ object TextData {
final case class Attachment( final case class Attachment(
item: Ident, item: Ident,
attachId: Ident, attachId: Ident,
collective: Ident, collective: CollectiveId,
folder: Option[Ident], folder: Option[Ident],
language: Language, language: Language,
name: Option[String], name: Option[String],
@ -46,7 +46,7 @@ object TextData {
def attachment( def attachment(
item: Ident, item: Ident,
attachId: Ident, attachId: Ident,
collective: Ident, collective: CollectiveId,
folder: Option[Ident], folder: Option[Ident],
lang: Language, lang: Language,
name: Option[String], name: Option[String],
@ -56,7 +56,7 @@ object TextData {
final case class Item( final case class Item(
item: Ident, item: Ident,
collective: Ident, collective: CollectiveId,
folder: Option[Ident], folder: Option[Ident],
name: Option[String], name: Option[String],
notes: Option[String], notes: Option[String],
@ -69,7 +69,7 @@ object TextData {
def item( def item(
item: Ident, item: Ident,
collective: Ident, collective: CollectiveId,
folder: Option[Ident], folder: Option[Ident],
name: Option[String], name: Option[String],
notes: Option[String], notes: Option[String],

View File

@ -0,0 +1,33 @@
drop index "ftspsql_search_ftsidx";
drop index "ftpsql_search_item_idx";
drop index "ftpsql_search_attach_idx";
drop index "ftpsql_search_folder_idx";
drop table "ftspsql_search" cascade;
create table "ftspsql_search"(
"id" varchar(254) not null primary key,
"item_id" varchar(254) not null,
"collective" bigint not null,
"lang" varchar(254) not null,
"attach_id" varchar(254),
"folder_id" varchar(254),
"updated_at" timestamptz not null default current_timestamp,
--- content columns
"attach_name" text,
"attach_content" text,
"item_name" text,
"item_notes" text,
--- index column
"fts_config" regconfig not null,
"text_index" tsvector
generated always as (
setweight(to_tsvector("fts_config", coalesce("attach_name", '')), 'B') ||
setweight(to_tsvector("fts_config", coalesce("item_name", '')), 'B') ||
setweight(to_tsvector("fts_config", coalesce("attach_content", '')), 'C') ||
setweight(to_tsvector("fts_config", coalesce("item_notes", '')), 'C')) stored
);
create index "ftspsql_search_ftsidx" on "ftspsql_search" using GIN ("text_index");
create index "ftpsql_search_item_idx" on "ftspsql_search"("item_id");
create index "ftpsql_search_attach_idx" on "ftspsql_search"("attach_id");
create index "ftpsql_search_folder_idx" on "ftspsql_search"("folder_id");

View File

@ -26,6 +26,8 @@ trait DoobieMeta {
implicit val metaLanguage: Meta[Language] = implicit val metaLanguage: Meta[Language] =
Meta[String].timap(Language.unsafe)(_.iso3) Meta[String].timap(Language.unsafe)(_.iso3)
implicit val metaCollectiveId: Meta[CollectiveId] =
Meta[Long].timap(CollectiveId(_))(_.value)
} }
object DoobieMeta { object DoobieMeta {

View File

@ -8,13 +8,13 @@ package docspell.ftspsql
import cats.syntax.all._ import cats.syntax.all._
import docspell.common.{Ident, Language} import docspell.common.{CollectiveId, Ident, Language}
import docspell.ftsclient.TextData import docspell.ftsclient.TextData
final case class FtsRecord( final case class FtsRecord(
id: Ident, id: Ident,
itemId: Ident, itemId: Ident,
collective: Ident, collective: CollectiveId,
language: Language, language: Language,
attachId: Option[Ident], attachId: Option[Ident],
folderId: Option[Ident], folderId: Option[Ident],

View File

@ -16,8 +16,15 @@ import doobie._
import doobie.implicits._ import doobie.implicits._
object FtsRepository extends DoobieMeta { object FtsRepository extends DoobieMeta {
private[this] val logger = docspell.logging.getLogger[ConnectionIO]
val table = fr"ftspsql_search" val table = fr"ftspsql_search"
def containsData: ConnectionIO[Boolean] =
sql"select id from $table limit 1".query[String].option.map(_.isDefined)
def containsNoData: ConnectionIO[Boolean] =
containsData.map(!_)
def searchSummary(pq: PgQueryParser, rn: RankNormalization)( def searchSummary(pq: PgQueryParser, rn: RankNormalization)(
q: FtsQuery q: FtsQuery
): ConnectionIO[SearchSummary] = { ): ConnectionIO[SearchSummary] = {
@ -56,6 +63,7 @@ object FtsRepository extends DoobieMeta {
val query = mkQueryPart(pq, q) val query = mkQueryPart(pq, q)
val sqlFrag =
sql"""select $select sql"""select $select
|from $table, $query |from $table, $query
|where ${mkCondition(q)} AND query @@ text_index |where ${mkCondition(q)} AND query @@ text_index
@ -63,8 +71,9 @@ object FtsRepository extends DoobieMeta {
|limit ${q.limit} |limit ${q.limit}
|offset ${q.offset} |offset ${q.offset}
|""".stripMargin |""".stripMargin
.query[SearchResult]
.to[Vector] logger.asUnsafe.trace(s"PSQL Fulltext query: $sqlFrag")
sqlFrag.query[SearchResult].to[Vector]
} }
private def mkCondition(q: FtsQuery): Fragment = { private def mkCondition(q: FtsQuery): Fragment = {
@ -78,7 +87,7 @@ object FtsRepository extends DoobieMeta {
val folders = val folders =
NonEmptyList.fromList(q.folders.toList).map { nel => NonEmptyList.fromList(q.folders.toList).map { nel =>
val ids = nel.map(id => fr"$id").reduceLeft(_ ++ fr"," ++ _) val ids = nel.map(id => fr"$id").reduceLeft(_ ++ fr"," ++ _)
fr"folder_id in ($ids)" fr"(folder_id in ($ids) or folder_id is null)"
} }
List(items, folders).flatten.foldLeft(coll)(_ ++ fr"AND" ++ _) List(items, folders).flatten.foldLeft(coll)(_ ++ fr"AND" ++ _)
@ -139,7 +148,7 @@ object FtsRepository extends DoobieMeta {
def updateFolder( def updateFolder(
itemId: Ident, itemId: Ident,
collective: Ident, collective: CollectiveId,
folder: Option[Ident] folder: Option[Ident]
): ConnectionIO[Int] = ): ConnectionIO[Int] =
(sql"UPDATE $table" ++ (sql"UPDATE $table" ++
@ -155,7 +164,7 @@ object FtsRepository extends DoobieMeta {
def deleteAll: ConnectionIO[Int] = def deleteAll: ConnectionIO[Int] =
sql"DELETE FROM $table".update.run sql"DELETE FROM $table".update.run
def delete(collective: Ident): ConnectionIO[Int] = def delete(collective: CollectiveId): ConnectionIO[Int] =
sql"DELETE FROM $table WHERE collective = $collective".update.run sql"DELETE FROM $table WHERE collective = $collective".update.run
def resetAll: ConnectionIO[Int] = { def resetAll: ConnectionIO[Int] = {

View File

@ -26,6 +26,8 @@ final class PsqlFtsClient[F[_]: Sync](cfg: PsqlConfig, xa: Transactor[F])
val engine = Ident.unsafe("postgres") val engine = Ident.unsafe("postgres")
val config = cfg val config = cfg
private[this] val logger = docspell.logging.getLogger[F]
private[ftspsql] val transactor = xa private[ftspsql] val transactor = xa
private[this] val searchSummary = private[this] val searchSummary =
@ -46,6 +48,16 @@ final class PsqlFtsClient[F[_]: Sync](cfg: PsqlConfig, xa: Transactor[F])
engine, engine,
"initialize", "initialize",
DbMigration[F](cfg).run.as(FtsMigration.Result.WorkDone) DbMigration[F](cfg).run.as(FtsMigration.Result.WorkDone)
),
FtsMigration(
1,
engine,
"Re-Index if empty",
FtsRepository.containsNoData
.transact(xa)
.map(empty =>
if (empty) FtsMigration.Result.IndexAll else FtsMigration.Result.WorkDone
)
) )
) )
) )
@ -73,6 +85,7 @@ final class PsqlFtsClient[F[_]: Sync](cfg: PsqlConfig, xa: Transactor[F])
summary <- searchSummary(q).transact(xa) summary <- searchSummary(q).transact(xa)
results <- search(q, true).transact(xa) results <- search(q, true).transact(xa)
endNanos <- Sync[F].delay(System.nanoTime()) endNanos <- Sync[F].delay(System.nanoTime())
_ <- logger.debug(s"PSQL fulltext search hits: ${results.size}")
duration = Duration.nanos(endNanos - startNanos) duration = Duration.nanos(endNanos - startNanos)
res = SearchResult res = SearchResult
.toFtsResult(summary, results) .toFtsResult(summary, results)
@ -104,11 +117,11 @@ final class PsqlFtsClient[F[_]: Sync](cfg: PsqlConfig, xa: Transactor[F])
def updateFolder( def updateFolder(
logger: Logger[F], logger: Logger[F],
itemId: Ident, itemId: Ident,
collective: Ident, collective: CollectiveId,
folder: Option[Ident] folder: Option[Ident]
): F[Unit] = ): F[Unit] =
logger.debug(s"Update folder '${folder logger.debug(s"Update folder '${folder
.map(_.id)}' in fts for collective ${collective.id} and item ${itemId.id}") *> .map(_.id)}' in fts for collective ${collective.value} and item ${itemId.id}") *>
FtsRepository.updateFolder(itemId, collective, folder).transact(xa).void FtsRepository.updateFolder(itemId, collective, folder).transact(xa).void
def removeItem(logger: Logger[F], itemId: Ident): F[Unit] = def removeItem(logger: Logger[F], itemId: Ident): F[Unit] =
@ -123,8 +136,8 @@ final class PsqlFtsClient[F[_]: Sync](cfg: PsqlConfig, xa: Transactor[F])
logger.info(s"Deleting complete FTS index") *> logger.info(s"Deleting complete FTS index") *>
FtsRepository.deleteAll.transact(xa).void FtsRepository.deleteAll.transact(xa).void
def clear(logger: Logger[F], collective: Ident): F[Unit] = def clear(logger: Logger[F], collective: CollectiveId): F[Unit] =
logger.info(s"Deleting index for collective ${collective.id}") *> logger.info(s"Deleting index for collective ${collective.value}") *>
FtsRepository.delete(collective).transact(xa).void FtsRepository.delete(collective).transact(xa).void
} }

View File

@ -13,7 +13,7 @@ import docspell.ftsclient.FtsResult.{ItemMatch, MatchData}
final case class SearchResult( final case class SearchResult(
id: Ident, id: Ident,
itemId: Ident, itemId: Ident,
collective: Ident, collective: CollectiveId,
language: Language, language: Language,
attachId: Option[Ident], attachId: Option[Ident],
folderId: Option[Ident], folderId: Option[Ident],

View File

@ -51,8 +51,8 @@ trait PgFixtures {
self.transact(client.transactor) self.transact(client.transactor)
} }
val collective1 = ident("coll1") val collective1 = CollectiveId(1)
val collective2 = ident("coll2") val collective2 = CollectiveId(2)
val itemData: TextData.Item = val itemData: TextData.Item =
TextData.Item( TextData.Item(

View File

@ -17,7 +17,8 @@ import io.circe.syntax._
trait JsonCodec { trait JsonCodec {
implicit def attachmentEncoder(implicit implicit def attachmentEncoder(implicit
enc: Encoder[Ident] enc: Encoder[Ident],
encCid: Encoder[CollectiveId]
): Encoder[TextData.Attachment] = ): Encoder[TextData.Attachment] =
new Encoder[TextData.Attachment] { new Encoder[TextData.Attachment] {
final def apply(td: TextData.Attachment): Json = { final def apply(td: TextData.Attachment): Json = {
@ -28,7 +29,7 @@ trait JsonCodec {
cnt :: List( cnt :: List(
(Field.id.name, enc(td.id)), (Field.id.name, enc(td.id)),
(Field.itemId.name, enc(td.item)), (Field.itemId.name, enc(td.item)),
(Field.collectiveId.name, enc(td.collective)), (Field.collectiveId.name, encCid(td.collective)),
(Field.folderId.name, td.folder.getOrElse(Ident.unsafe("")).asJson), (Field.folderId.name, td.folder.getOrElse(Ident.unsafe("")).asJson),
(Field.attachmentId.name, enc(td.attachId)), (Field.attachmentId.name, enc(td.attachId)),
(Field.attachmentName.name, Json.fromString(td.name.getOrElse(""))), (Field.attachmentName.name, Json.fromString(td.name.getOrElse(""))),
@ -39,13 +40,16 @@ trait JsonCodec {
} }
} }
implicit def itemEncoder(implicit enc: Encoder[Ident]): Encoder[TextData.Item] = implicit def itemEncoder(implicit
enc: Encoder[Ident],
encCid: Encoder[CollectiveId]
): Encoder[TextData.Item] =
new Encoder[TextData.Item] { new Encoder[TextData.Item] {
final def apply(td: TextData.Item): Json = final def apply(td: TextData.Item): Json =
Json.obj( Json.obj(
(Field.id.name, enc(td.id)), (Field.id.name, enc(td.id)),
(Field.itemId.name, enc(td.item)), (Field.itemId.name, enc(td.item)),
(Field.collectiveId.name, enc(td.collective)), (Field.collectiveId.name, encCid(td.collective)),
(Field.folderId.name, td.folder.getOrElse(Ident.unsafe("")).asJson), (Field.folderId.name, td.folder.getOrElse(Ident.unsafe("")).asJson),
(Field.itemName.name, Json.fromString(td.name.getOrElse(""))), (Field.itemName.name, Json.fromString(td.name.getOrElse(""))),
(Field.itemNotes.name, Json.fromString(td.notes.getOrElse(""))), (Field.itemNotes.name, Json.fromString(td.notes.getOrElse(""))),
@ -121,7 +125,7 @@ trait JsonCodec {
for { for {
itemId <- c.get[Ident](Field.itemId.name) itemId <- c.get[Ident](Field.itemId.name)
id <- c.get[Ident](Field.id.name) id <- c.get[Ident](Field.id.name)
coll <- c.get[Ident](Field.collectiveId.name) coll <- c.get[CollectiveId](Field.collectiveId.name)
score <- c.get[Double]("score") score <- c.get[Double]("score")
md <- decodeMatchData(c) md <- decodeMatchData(c)
} yield FtsResult.ItemMatch(id, itemId, coll, score, md) } yield FtsResult.ItemMatch(id, itemId, coll, score, md)

View File

@ -51,7 +51,7 @@ object QueryData {
val items = fq.items.map(_.id).mkString(" ") val items = fq.items.map(_.id).mkString(" ")
val folders = fq.folders.map(_.id).mkString(" ") val folders = fq.folders.map(_.id).mkString(" ")
val filterQ = List( val filterQ = List(
s"""${Field.collectiveId.name}:"${fq.collective.id}"""", s"""${Field.collectiveId.name}:"${fq.collective.value}"""",
fq.items match { fq.items match {
case s if s.isEmpty => case s if s.isEmpty =>
"" ""

View File

@ -41,11 +41,11 @@ final class SolrFtsClient[F[_]: Async](
def updateFolder( def updateFolder(
logger: Logger[F], logger: Logger[F],
itemId: Ident, itemId: Ident,
collective: Ident, collective: CollectiveId,
folder: Option[Ident] folder: Option[Ident]
): F[Unit] = ): F[Unit] =
logger.debug( logger.debug(
s"Update folder in solr index for coll/item ${collective.id}/${itemId.id}" s"Update folder in solr index for coll/item ${collective.value}/${itemId.id}"
) *> ) *>
solrUpdate.updateFolder(itemId, collective, folder) solrUpdate.updateFolder(itemId, collective, folder)
@ -75,9 +75,9 @@ final class SolrFtsClient[F[_]: Async](
logger.info("Deleting complete full-text index!") *> logger.info("Deleting complete full-text index!") *>
solrUpdate.delete("*:*", Option(0)) solrUpdate.delete("*:*", Option(0))
def clear(logger: Logger[F], collective: Ident): F[Unit] = def clear(logger: Logger[F], collective: CollectiveId): F[Unit] =
logger.info(s"Deleting full-text index for collective ${collective.id}") *> logger.info(s"Deleting full-text index for collective ${collective.value}") *>
solrUpdate.delete(s"${Field.collectiveId.name}:${collective.id}", Option(0)) solrUpdate.delete(s"${Field.collectiveId.name}:${collective.value}", Option(0))
} }
object SolrFtsClient { object SolrFtsClient {

View File

@ -155,7 +155,8 @@ object SolrSetup {
"Add polish", "Add polish",
addContentField(Language.Polish) addContentField(Language.Polish)
), ),
SolrMigration.reIndexAll(26, "Re-Index after adding polish content field") SolrMigration.reIndexAll(26, "Re-Index after adding polish content field"),
SolrMigration.reIndexAll(27, "Re-Index after collective-id change")
) )
def addFolderField: F[Unit] = def addFolderField: F[Unit] =

View File

@ -27,7 +27,11 @@ trait SolrUpdate[F[_]] {
def update(tds: List[TextData]): F[Unit] def update(tds: List[TextData]): F[Unit]
def updateFolder(itemId: Ident, collective: Ident, folder: Option[Ident]): F[Unit] def updateFolder(
itemId: Ident,
collective: CollectiveId,
folder: Option[Ident]
): F[Unit]
def updateVersionDoc(doc: VersionDoc): F[Unit] def updateVersionDoc(doc: VersionDoc): F[Unit]
@ -63,13 +67,13 @@ object SolrUpdate {
def updateFolder( def updateFolder(
itemId: Ident, itemId: Ident,
collective: Ident, collective: CollectiveId,
folder: Option[Ident] folder: Option[Ident]
): F[Unit] = { ): F[Unit] = {
val queryUrl = Uri.unsafeFromString(cfg.url.asString) / "query" val queryUrl = Uri.unsafeFromString(cfg.url.asString) / "query"
val q = QueryData( val q = QueryData(
"*:*", "*:*",
s"${Field.itemId.name}:${itemId.id} AND ${Field.collectiveId.name}:${collective.id}", s"${Field.itemId.name}:${itemId.id} AND ${Field.collectiveId.name}:${collective.value}",
Int.MaxValue, Int.MaxValue,
0, 0,
List(Field.id), List(Field.id),
@ -97,8 +101,7 @@ object SolrUpdate {
client.expect[Unit](req) client.expect[Unit](req)
} }
private val minOneChange: TextData => Boolean = private val minOneChange: TextData => Boolean = {
_ match {
case td: TextData.Attachment => case td: TextData.Attachment =>
td.name.isDefined || td.text.isDefined td.name.isDefined || td.text.isDefined
case td: TextData.Item => case td: TextData.Item =>

View File

@ -11,6 +11,7 @@ import cats.implicits._
import fs2.concurrent.SignallingRef import fs2.concurrent.SignallingRef
import docspell.backend.MailAddressCodec import docspell.backend.MailAddressCodec
import docspell.backend.joex.FindJobOwnerAccount
import docspell.backend.ops._ import docspell.backend.ops._
import docspell.common._ import docspell.common._
import docspell.joex.emptytrash._ import docspell.joex.emptytrash._
@ -85,7 +86,7 @@ final class JoexAppImpl[F[_]: Async](
.evalMap { es => .evalMap { es =>
val args = EmptyTrashArgs(es.cid, es.minAge) val args = EmptyTrashArgs(es.cid, es.minAge)
uts.updateOneTask( uts.updateOneTask(
UserTaskScope(args.collective), UserTaskScope.collective(args.collective),
args.makeSubject.some, args.makeSubject.some,
EmptyTrashTask.userTask(args, es.schedule) EmptyTrashTask.userTask(args, es.schedule)
) )
@ -117,6 +118,7 @@ object JoexAppImpl extends MailAddressCodec {
jobStoreModule = JobStoreModuleBuilder(store) jobStoreModule = JobStoreModuleBuilder(store)
.withPubsub(pubSubT) .withPubsub(pubSubT)
.withEventSink(notificationMod) .withEventSink(notificationMod)
.withFindJobOwner(FindJobOwnerAccount(store))
.build .build
tasks <- JoexTasks.resource( tasks <- JoexTasks.resource(

View File

@ -49,7 +49,7 @@ object GenericItemAddonTask extends LoggerExtension {
trigger: AddonTriggerType, trigger: AddonTriggerType,
addonTaskIds: Set[Ident] addonTaskIds: Set[Ident]
)( )(
collective: Ident, collective: CollectiveId,
data: ItemData, data: ItemData,
maybeMeta: Option[ProcessItemArgs.ProcessMeta] maybeMeta: Option[ProcessItemArgs.ProcessMeta]
): Task[F, Unit, ItemData] = ): Task[F, Unit, ItemData] =
@ -63,7 +63,7 @@ object GenericItemAddonTask extends LoggerExtension {
trigger: AddonTriggerType, trigger: AddonTriggerType,
addonTaskIds: Set[Ident] addonTaskIds: Set[Ident]
)( )(
collective: Ident, collective: CollectiveId,
data: ItemData, data: ItemData,
maybeMeta: Option[ProcessItemArgs.ProcessMeta] maybeMeta: Option[ProcessItemArgs.ProcessMeta]
): Task[F, Unit, ExecResult] = ): Task[F, Unit, ExecResult] =

View File

@ -18,7 +18,7 @@ import docspell.store.queries.QCollective
import io.circe.generic.semiauto._ import io.circe.generic.semiauto._
import io.circe.{Decoder, Encoder} import io.circe.{Decoder, Encoder}
case class NerFile(collective: Ident, updated: Timestamp, creation: Timestamp) { case class NerFile(collective: CollectiveId, updated: Timestamp, creation: Timestamp) {
def nerFilePath(directory: Path): Path = def nerFilePath(directory: Path): Path =
NerFile.nerFilePath(directory, collective) NerFile.nerFilePath(directory, collective)
@ -33,14 +33,14 @@ object NerFile {
implicit val jsonEncoder: Encoder[NerFile] = implicit val jsonEncoder: Encoder[NerFile] =
deriveEncoder[NerFile] deriveEncoder[NerFile]
private def nerFilePath(directory: Path, collective: Ident): Path = private def nerFilePath(directory: Path, collective: CollectiveId): Path =
directory.resolve(s"${collective.id}.txt") directory.resolve(s"${collective.value}.txt")
private def jsonFilePath(directory: Path, collective: Ident): Path = private def jsonFilePath(directory: Path, collective: CollectiveId): Path =
directory.resolve(s"${collective.id}.json") directory.resolve(s"${collective.value}.json")
def find[F[_]: Async]( def find[F[_]: Async](
collective: Ident, collective: CollectiveId,
directory: Path directory: Path
): F[Option[NerFile]] = { ): F[Option[NerFile]] = {
val file = jsonFilePath(directory, collective) val file = jsonFilePath(directory, collective)

View File

@ -24,7 +24,7 @@ import io.circe.syntax._
/** Maintains a custom regex-ner file per collective for stanford's regexner annotator. */ /** Maintains a custom regex-ner file per collective for stanford's regexner annotator. */
trait RegexNerFile[F[_]] { trait RegexNerFile[F[_]] {
def makeFile(collective: Ident): F[Option[Path]] def makeFile(collective: CollectiveId): F[Option[Path]]
} }
@ -49,11 +49,11 @@ object RegexNerFile {
private[this] val logger = docspell.logging.getLogger[F] private[this] val logger = docspell.logging.getLogger[F]
def makeFile(collective: Ident): F[Option[Path]] = def makeFile(collective: CollectiveId): F[Option[Path]] =
if (cfg.maxEntries > 0) doMakeFile(collective) if (cfg.maxEntries > 0) doMakeFile(collective)
else (None: Option[Path]).pure[F] else (None: Option[Path]).pure[F]
def doMakeFile(collective: Ident): F[Option[Path]] = def doMakeFile(collective: CollectiveId): F[Option[Path]] =
for { for {
now <- Timestamp.current[F] now <- Timestamp.current[F]
existing <- NerFile.find[F](collective, cfg.directory) existing <- NerFile.find[F](collective, cfg.directory)
@ -75,7 +75,7 @@ object RegexNerFile {
} yield result } yield result
private def updateFile( private def updateFile(
collective: Ident, collective: CollectiveId,
now: Timestamp, now: Timestamp,
current: Option[NerFile] current: Option[NerFile]
): F[Option[Path]] = ): F[Option[Path]] =
@ -95,7 +95,7 @@ object RegexNerFile {
) *> cur.pure[F] ) *> cur.pure[F]
else else
logger.debug( logger.debug(
s"There have been state changes for collective '${collective.id}'. Reload NER file." s"There have been state changes for collective '${collective.value}'. Reload NER file."
) *> createFile(lup, collective, now) ) *> createFile(lup, collective, now)
nerf.map(_.nerFilePath(cfg.directory).some) nerf.map(_.nerFilePath(cfg.directory).some)
case None => case None =>
@ -119,7 +119,7 @@ object RegexNerFile {
private def createFile( private def createFile(
lastUpdate: Timestamp, lastUpdate: Timestamp,
collective: Ident, collective: CollectiveId,
now: Timestamp now: Timestamp
): F[NerFile] = { ): F[NerFile] = {
def update(nf: NerFile, text: String): F[Unit] = def update(nf: NerFile, text: String): F[Unit] =
@ -127,7 +127,7 @@ object RegexNerFile {
for { for {
jsonFile <- Sync[F].pure(nf.jsonFilePath(cfg.directory)) jsonFile <- Sync[F].pure(nf.jsonFilePath(cfg.directory))
_ <- logger.debug( _ <- logger.debug(
s"Writing custom NER file for collective '${collective.id}'" s"Writing custom NER file for collective '${collective.value}'"
) )
_ <- jsonFile.parent match { _ <- jsonFile.parent match {
case Some(p) => File.mkDir(p) case Some(p) => File.mkDir(p)
@ -139,7 +139,9 @@ object RegexNerFile {
) )
for { for {
_ <- logger.info(s"Generating custom NER file for collective '${collective.id}'") _ <- logger.info(
s"Generating custom NER file for collective '${collective.value}'"
)
names <- store.transact(QCollective.allNames(collective, cfg.maxEntries)) names <- store.transact(QCollective.allNames(collective, cfg.maxEntries))
nerFile = NerFile(collective, lastUpdate, now) nerFile = NerFile(collective, lastUpdate, now)
_ <- update(nerFile, NerFile.mkNerConfig(names)) _ <- update(nerFile, NerFile.mkNerConfig(names))
@ -152,8 +154,8 @@ object RegexNerFile {
import docspell.store.qb.DSL._ import docspell.store.qb.DSL._
import docspell.store.qb._ import docspell.store.qb._
def latestUpdate(collective: Ident): ConnectionIO[Option[Timestamp]] = { def latestUpdate(collective: CollectiveId): ConnectionIO[Option[Timestamp]] = {
def max_(col: Column[_], cidCol: Column[Ident]): Select = def max_(col: Column[_], cidCol: Column[CollectiveId]): Select =
Select(max(col).as("t"), from(col.table), cidCol === collective) Select(max(col).as("t"), from(col.table), cidCol === collective)
val sql = union( val sql = union(

View File

@ -35,7 +35,7 @@ object DownloadZipTask {
): Task[F, Args, Result] = ): Task[F, Args, Result] =
Task { ctx => Task { ctx =>
val req = ctx.args.req val req = ctx.args.req
val query = req.toQuery(ctx.args.accountId) val query = req.toQuery(ctx.args.account)
val allFiles = val allFiles =
Stream Stream
@ -53,7 +53,7 @@ object DownloadZipTask {
.through(Zip[F](ctx.logger.some).zip(chunkSize)) .through(Zip[F](ctx.logger.some).zip(chunkSize))
.through( .through(
store.fileRepo.save( store.fileRepo.save(
ctx.args.accountId.collective, ctx.args.account.collectiveId,
FileCategory.DownloadAll, FileCategory.DownloadAll,
MimeTypeHint.advertised("application/zip") MimeTypeHint.advertised("application/zip")
) )
@ -61,10 +61,10 @@ object DownloadZipTask {
for { for {
_ <- ctx.logger.info(s"Start zipping ${req.itemQueryString}") _ <- ctx.logger.info(s"Start zipping ${req.itemQueryString}")
summary <- downloadOps.getSummary(ctx.args.accountId, req) summary <- downloadOps.getSummary(ctx.args.account, req)
_ <- ctx.logger.debug(s"Summary: $summary") _ <- ctx.logger.debug(s"Summary: $summary")
file <- storeZipFile.compile.lastOrError file <- storeZipFile.compile.lastOrError
row <- createRow(summary, ctx.args.accountId.collective, file) row <- createRow(summary, ctx.args.account.collectiveId, file)
_ <- ctx.logger.debug(s"Inserting zip file: $row") _ <- ctx.logger.debug(s"Inserting zip file: $row")
_ <- store.transact(RDownloadQuery.insert(row)) _ <- store.transact(RDownloadQuery.insert(row))
} yield Result(summary.fileCount) } yield Result(summary.fileCount)
@ -92,7 +92,7 @@ object DownloadZipTask {
def createRow[F[_]: Sync]( def createRow[F[_]: Sync](
summary: DownloadSummary, summary: DownloadSummary,
cid: Ident, cid: CollectiveId,
file: FileKey file: FileKey
): F[RDownloadQuery] = ): F[RDownloadQuery] =
Timestamp.current[F].map { now => Timestamp.current[F].map { now =>

View File

@ -68,7 +68,7 @@ object EmptyTrashTask {
.foldMonoid .foldMonoid
private def deleteChunk[F[_]: Async]( private def deleteChunk[F[_]: Async](
collective: Ident, collective: CollectiveId,
itemOps: OItem[F], itemOps: OItem[F],
ctx: Context[F, _] ctx: Context[F, _]
)(chunk: Vector[RItem]): F[Int] = )(chunk: Vector[RItem]): F[Int] =

View File

@ -74,10 +74,10 @@ object FtsWork {
def log[F[_]](f: Logger[F] => F[Unit]): FtsWork[F] = def log[F[_]](f: Logger[F] => F[Unit]): FtsWork[F] =
FtsWork(ctx => f(ctx.logger)) FtsWork(ctx => f(ctx.logger))
def clearIndex[F[_]: FlatMap](coll: Option[Ident]): FtsWork[F] = def clearIndex[F[_]: FlatMap](coll: Option[CollectiveId]): FtsWork[F] =
coll match { coll match {
case Some(cid) => case Some(cid) =>
log[F](_.debug(s"Clearing index data for collective '${cid.id}'")) ++ FtsWork( log[F](_.debug(s"Clearing index data for collective '${cid.value}'")) ++ FtsWork(
ctx => ctx.fts.clear(ctx.logger, cid) ctx => ctx.fts.clear(ctx.logger, cid)
) )
case None => case None =>
@ -86,7 +86,7 @@ object FtsWork {
) )
} }
def insertAll[F[_]: FlatMap](coll: Option[Ident]): FtsWork[F] = def insertAll[F[_]: FlatMap](coll: Option[CollectiveId]): FtsWork[F] =
log[F](_.info("Inserting all data to index")) ++ FtsWork log[F](_.info("Inserting all data to index")) ++ FtsWork
.all( .all(
FtsWork(ctx => FtsWork(ctx =>
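The FtsWork hunks compose small units of index maintenance with `++` and branch on whether a collective id is given. A simplified, self-contained sketch of that composition pattern (the `Ctx`, `Work`, and logging types below are stand-ins, not the docspell API) might be:

```scala
import cats.effect.{IO, IOApp}
import cats.syntax.all._

object FtsWorkSketch extends IOApp.Simple {
  final case class CollectiveId(value: Long)
  final case class Ctx(log: String => IO[Unit])

  // A unit of full-text-index work is a function from context to effect.
  type Work = Ctx => IO[Unit]

  implicit class WorkOps(private val self: Work) extends AnyVal {
    // Sequence two units of work, as FtsWork's `++` does.
    def ++(next: Work): Work = ctx => self(ctx) *> next(ctx)
  }

  def log(msg: String): Work = ctx => ctx.log(msg)

  // Clearing is scoped to one collective when an id is given, otherwise global.
  def clearIndex(coll: Option[CollectiveId]): Work =
    coll match {
      case Some(cid) => log(s"Clearing index data for collective '${cid.value}'")
      case None      => log("Clearing all index data")
    }

  val run: IO[Unit] =
    (clearIndex(Some(CollectiveId(1L))) ++ log("Inserting all data to index"))(
      Ctx(msg => IO.println(msg))
    )
}
```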

View File

@ -13,6 +13,7 @@ import docspell.backend.fulltext.CreateIndex
import docspell.common._ import docspell.common._
import docspell.ftsclient._ import docspell.ftsclient._
import docspell.joex.Config import docspell.joex.Config
import docspell.scheduler.usertask.UserTaskScope
import docspell.scheduler.{Job, Task} import docspell.scheduler.{Job, Task}
import docspell.store.Store import docspell.store.Store
@ -43,10 +44,9 @@ object MigrationTask {
Job Job
.createNew( .createNew(
taskName, taskName,
DocspellSystem.taskGroup, UserTaskScope.system,
(), (),
"Create full-text index", "Create full-text index",
DocspellSystem.taskGroup,
Priority.Low, Priority.Low,
Some(DocspellSystem.migrationTaskTracker) Some(DocspellSystem.migrationTaskTracker)
) )

View File

@ -40,7 +40,7 @@ object ReIndexTask {
def onCancel[F[_]]: Task[F, Args, Unit] = def onCancel[F[_]]: Task[F, Args, Unit] =
Task.log[F, Args](_.warn("Cancelling full-text re-index task")) Task.log[F, Args](_.warn("Cancelling full-text re-index task"))
private def clearData[F[_]: Async](collective: Option[Ident]): FtsWork[F] = private def clearData[F[_]: Async](collective: Option[CollectiveId]): FtsWork[F] =
FtsWork.log[F](_.info("Clearing index data")) ++ FtsWork.log[F](_.info("Clearing index data")) ++
(collective match { (collective match {
case Some(_) => case Some(_) =>

View File

@ -9,7 +9,7 @@ package docspell.joex.learn
import cats.data.NonEmptyList import cats.data.NonEmptyList
import cats.implicits._ import cats.implicits._
import docspell.common.Ident import docspell.common.CollectiveId
import docspell.store.records.{RClassifierModel, RClassifierSetting} import docspell.store.records.{RClassifierModel, RClassifierSetting}
import doobie._ import doobie._
@ -37,12 +37,12 @@ object ClassifierName {
val correspondentPerson: ClassifierName = val correspondentPerson: ClassifierName =
apply("correspondentperson") apply("correspondentperson")
def findTagClassifiers[F[_]](coll: Ident): ConnectionIO[List[ClassifierName]] = def findTagClassifiers(coll: CollectiveId): ConnectionIO[List[ClassifierName]] =
for { for {
categories <- RClassifierSetting.getActiveCategories(coll) categories <- RClassifierSetting.getActiveCategories(coll)
} yield categories.map(tagCategory) } yield categories.map(tagCategory)
def findTagModels[F[_]](coll: Ident): ConnectionIO[List[RClassifierModel]] = def findTagModels(coll: CollectiveId): ConnectionIO[List[RClassifierModel]] =
for { for {
categories <- RClassifierSetting.getActiveCategories(coll) categories <- RClassifierSetting.getActiveCategories(coll)
models <- NonEmptyList.fromList(categories) match { models <- NonEmptyList.fromList(categories) match {
@ -53,7 +53,9 @@ object ClassifierName {
} }
} yield models } yield models
def findOrphanTagModels[F[_]](coll: Ident): ConnectionIO[List[RClassifierModel]] = def findOrphanTagModels(
coll: CollectiveId
): ConnectionIO[List[RClassifierModel]] =
for { for {
cats <- RClassifierSetting.getActiveCategories(coll) cats <- RClassifierSetting.getActiveCategories(coll)
allModels = RClassifierModel.findAllByQuery(coll, s"$categoryPrefix%") allModels = RClassifierModel.findAllByQuery(coll, s"$categoryPrefix%")
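`findOrphanTagModels` keeps only the models whose tag category is no longer active for the collective. The essence of that selection, with the naming scheme and types reduced to plain values for illustration (the `tagcategory-` prefix below is an assumption, not taken from this diff), is:

```scala
object OrphanModelsSketch {
  final case class ClassifierModel(name: String)

  // Assumed naming scheme for per-category classifier models.
  def tagCategory(category: String): String = s"tagcategory-$category"

  // A model is "orphaned" when its category is no longer among the
  // collective's active categories.
  def findOrphans(
      activeCategories: List[String],
      allModels: List[ClassifierModel]
  ): List[ClassifierModel] = {
    val wanted = activeCategories.map(tagCategory).toSet
    allModels.filterNot(m => wanted.contains(m.name))
  }
}
```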

View File

@ -26,7 +26,7 @@ object Classify {
workingDir: Path, workingDir: Path,
store: Store[F], store: Store[F],
classifier: TextClassifier[F], classifier: TextClassifier[F],
coll: Ident, coll: CollectiveId,
text: String text: String
)(cname: ClassifierName): F[Option[String]] = )(cname: ClassifierName): F[Option[String]] =
(for { (for {

View File

@ -48,7 +48,7 @@ object LearnClassifierTask {
.learnAll( .learnAll(
analyser, analyser,
store, store,
ctx.args.collective, ctx.args.collectiveId,
cfg.classification.itemCount, cfg.classification.itemCount,
cfg.maxLength cfg.maxLength
) )
@ -69,7 +69,7 @@ object LearnClassifierTask {
_ <- OptionT.liftF( _ <- OptionT.liftF(
LearnTags LearnTags
.learnAllTagCategories(analyser, store)( .learnAllTagCategories(analyser, store)(
ctx.args.collective, ctx.args.collectiveId,
maxItems, maxItems,
cfg.maxLength cfg.maxLength
) )
@ -82,7 +82,7 @@ object LearnClassifierTask {
clearObsoleteTagModels(ctx, store) *> clearObsoleteTagModels(ctx, store) *>
// when tags are deleted, categories may get removed. fix the json array // when tags are deleted, categories may get removed. fix the json array
store store
.transact(RClassifierSetting.fixCategoryList(ctx.args.collective)) .transact(RClassifierSetting.fixCategoryList(ctx.args.collectiveId))
.map(_ => ()) .map(_ => ())
} }
@ -92,7 +92,7 @@ object LearnClassifierTask {
): F[Unit] = ): F[Unit] =
for { for {
list <- store.transact( list <- store.transact(
ClassifierName.findOrphanTagModels(ctx.args.collective) ClassifierName.findOrphanTagModels(ctx.args.collectiveId)
) )
_ <- ctx.logger.info( _ <- ctx.logger.info(
s"Found ${list.size} obsolete model files that are deleted now." s"Found ${list.size} obsolete model files that are deleted now."
@ -110,7 +110,7 @@ object LearnClassifierTask {
cfg: Config.TextAnalysis cfg: Config.TextAnalysis
): OptionT[F, OCollective.Classifier] = ): OptionT[F, OCollective.Classifier] =
if (cfg.classification.enabled) if (cfg.classification.enabled)
OptionT(store.transact(RClassifierSetting.findById(ctx.args.collective))) OptionT(store.transact(RClassifierSetting.findById(ctx.args.collectiveId)))
.filter(_.autoTagEnabled) .filter(_.autoTagEnabled)
.map(OCollective.Classifier.fromRecord) .map(OCollective.Classifier.fromRecord)
else else

View File

@ -21,7 +21,7 @@ object LearnItemEntities {
def learnAll[F[_]: Async, A]( def learnAll[F[_]: Async, A](
analyser: TextAnalyser[F], analyser: TextAnalyser[F],
store: Store[F], store: Store[F],
collective: Ident, collective: CollectiveId,
maxItems: Int, maxItems: Int,
maxTextLen: Int maxTextLen: Int
): Task[F, A, Unit] = ): Task[F, A, Unit] =
@ -35,7 +35,7 @@ object LearnItemEntities {
def learnCorrOrg[F[_]: Async, A]( def learnCorrOrg[F[_]: Async, A](
analyser: TextAnalyser[F], analyser: TextAnalyser[F],
store: Store[F], store: Store[F],
collective: Ident, collective: CollectiveId,
maxItems: Int, maxItems: Int,
maxTextLen: Int maxTextLen: Int
): Task[F, A, Unit] = ): Task[F, A, Unit] =
@ -47,7 +47,7 @@ object LearnItemEntities {
def learnCorrPerson[F[_]: Async, A]( def learnCorrPerson[F[_]: Async, A](
analyser: TextAnalyser[F], analyser: TextAnalyser[F],
store: Store[F], store: Store[F],
collective: Ident, collective: CollectiveId,
maxItems: Int, maxItems: Int,
maxTextLen: Int maxTextLen: Int
): Task[F, A, Unit] = ): Task[F, A, Unit] =
@ -59,7 +59,7 @@ object LearnItemEntities {
def learnConcPerson[F[_]: Async, A]( def learnConcPerson[F[_]: Async, A](
analyser: TextAnalyser[F], analyser: TextAnalyser[F],
store: Store[F], store: Store[F],
collective: Ident, collective: CollectiveId,
maxItems: Int, maxItems: Int,
maxTextLen: Int maxTextLen: Int
): Task[F, A, Unit] = ): Task[F, A, Unit] =
@ -71,7 +71,7 @@ object LearnItemEntities {
def learnConcEquip[F[_]: Async, A]( def learnConcEquip[F[_]: Async, A](
analyser: TextAnalyser[F], analyser: TextAnalyser[F],
store: Store[F], store: Store[F],
collective: Ident, collective: CollectiveId,
maxItems: Int, maxItems: Int,
maxTextLen: Int maxTextLen: Int
): Task[F, A, Unit] = ): Task[F, A, Unit] =
@ -83,7 +83,7 @@ object LearnItemEntities {
private def learn[F[_]: Async, A]( private def learn[F[_]: Async, A](
store: Store[F], store: Store[F],
analyser: TextAnalyser[F], analyser: TextAnalyser[F],
collective: Ident collective: CollectiveId
)(cname: ClassifierName, data: Context[F, _] => Stream[F, Data]): Task[F, A, Unit] = )(cname: ClassifierName, data: Context[F, _] => Stream[F, Data]): Task[F, A, Unit] =
Task { ctx => Task { ctx =>
ctx.logger.info(s"Learn classifier ${cname.name}") *> ctx.logger.info(s"Learn classifier ${cname.name}") *>
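All the `learnX` variants above funnel into the single private `learn`, parameterized only by a classifier name and a function that selects the training data. The higher-order shape of that design, stripped of the docspell types (everything below is a simplified stand-in), is roughly:

```scala
import cats.effect.IO
import cats.syntax.all._
import fs2.Stream

object LearnPatternSketch {
  final case class CollectiveId(value: Long)
  final case class ClassifierName(name: String)
  final case class Data(text: String, label: String)

  // One generic learning step: which classifier to train and how to select data.
  def learn(collective: CollectiveId)(
      cname: ClassifierName,
      data: CollectiveId => Stream[IO, Data]
  ): IO[Unit] =
    IO.println(s"Learn classifier ${cname.name}") *>
      data(collective).compile.count.flatMap(n => IO.println(s"Trained on $n samples"))

  // Each public variant just fixes the name and the data selector.
  def learnCorrOrg(collective: CollectiveId): IO[Unit] =
    learn(collective)(
      ClassifierName("correspondentorg"),
      cid => Stream.emit(Data(s"sample for collective ${cid.value}", "acme"))
    )
}
```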

View File

@ -21,7 +21,7 @@ object LearnTags {
def learnTagCategory[F[_]: Async, A]( def learnTagCategory[F[_]: Async, A](
analyser: TextAnalyser[F], analyser: TextAnalyser[F],
store: Store[F], store: Store[F],
collective: Ident, collective: CollectiveId,
maxItems: Int, maxItems: Int,
maxTextLen: Int maxTextLen: Int
)( )(
@ -44,7 +44,7 @@ object LearnTags {
} }
def learnAllTagCategories[F[_]: Async, A](analyser: TextAnalyser[F], store: Store[F])( def learnAllTagCategories[F[_]: Async, A](analyser: TextAnalyser[F], store: Store[F])(
collective: Ident, collective: CollectiveId,
maxItems: Int, maxItems: Int,
maxTextLen: Int maxTextLen: Int
): Task[F, A, Unit] = ): Task[F, A, Unit] =

View File

@ -20,7 +20,7 @@ object SelectItems {
val pageSep = LearnClassifierTask.pageSep val pageSep = LearnClassifierTask.pageSep
val noClass = LearnClassifierTask.noClass val noClass = LearnClassifierTask.noClass
def forCategory[F[_]](store: Store[F], collective: Ident)( def forCategory[F[_]](store: Store[F], collective: CollectiveId)(
maxItems: Int, maxItems: Int,
category: String, category: String,
maxTextLen: Int maxTextLen: Int
@ -36,7 +36,7 @@ object SelectItems {
def forCorrOrg[F[_]]( def forCorrOrg[F[_]](
store: Store[F], store: Store[F],
collective: Ident, collective: CollectiveId,
maxItems: Int, maxItems: Int,
maxTextLen: Int maxTextLen: Int
): Stream[F, Data] = { ): Stream[F, Data] = {
@ -51,7 +51,7 @@ object SelectItems {
def forCorrPerson[F[_]]( def forCorrPerson[F[_]](
store: Store[F], store: Store[F],
collective: Ident, collective: CollectiveId,
maxItems: Int, maxItems: Int,
maxTextLen: Int maxTextLen: Int
): Stream[F, Data] = { ): Stream[F, Data] = {
@ -66,7 +66,7 @@ object SelectItems {
def forConcPerson[F[_]]( def forConcPerson[F[_]](
store: Store[F], store: Store[F],
collective: Ident, collective: CollectiveId,
maxItems: Int, maxItems: Int,
maxTextLen: Int maxTextLen: Int
): Stream[F, Data] = { ): Stream[F, Data] = {
@ -81,7 +81,7 @@ object SelectItems {
def forConcEquip[F[_]]( def forConcEquip[F[_]](
store: Store[F], store: Store[F],
collective: Ident, collective: CollectiveId,
maxItems: Int, maxItems: Int,
maxTextLen: Int maxTextLen: Int
): Stream[F, Data] = { ): Stream[F, Data] = {
@ -94,7 +94,10 @@ object SelectItems {
store.transact(connStream) store.transact(connStream)
} }
private def allItems(collective: Ident, max: Int): Stream[ConnectionIO, Ident] = { private def allItems(
collective: CollectiveId,
max: Int
): Stream[ConnectionIO, Ident] = {
val limit = if (max <= 0) Batch.all else Batch.limit(max) val limit = if (max <= 0) Batch.all else Batch.limit(max)
QItem.findAllNewesFirst(collective, 10, limit) QItem.findAllNewesFirst(collective, 10, limit)
} }
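`allItems` treats a non-positive `max` as "no limit" and otherwise restricts the query to the newest `max` items. A tiny sketch of that selection rule, with `Batch` reduced to a plain ADT stand-in for the store type of the same name:

```scala
// Stand-in for docspell.store's Batch; only the selection rule is shown.
sealed trait Batch
object Batch {
  case object All                extends Batch
  final case class Limit(n: Int) extends Batch

  // Non-positive max means unrestricted, otherwise take the newest `max` items.
  def fromMax(max: Int): Batch =
    if (max <= 0) All else Limit(max)
}
```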

View File

@ -21,7 +21,7 @@ object StoreClassifierModel {
def handleModel[F[_]: Async]( def handleModel[F[_]: Async](
store: Store[F], store: Store[F],
logger: Logger[F], logger: Logger[F],
collective: Ident, collective: CollectiveId,
modelName: ClassifierName modelName: ClassifierName
)( )(
trainedModel: ClassifierModel trainedModel: ClassifierModel

View File

@ -17,6 +17,7 @@ import docspell.common._
import docspell.common.util.Zip import docspell.common.util.Zip
import docspell.logging.Logger import docspell.logging.Logger
import docspell.scheduler._ import docspell.scheduler._
import docspell.scheduler.usertask.UserTaskScope
import docspell.store.Store import docspell.store.Store
/** Task to submit multiple files at once. By default, one file in an upload results in /** Task to submit multiple files at once. By default, one file in an upload results in
@ -90,7 +91,7 @@ object MultiUploadArchiveTask {
submitter = currentJob.map(_.submitter).getOrElse(DocspellSystem.user) submitter = currentJob.map(_.submitter).getOrElse(DocspellSystem.user)
job <- JobFactory.processItem( job <- JobFactory.processItem(
args, args,
AccountId(ctx.args.meta.collective, submitter), UserTaskScope(ctx.args.meta.collective, submitter.some),
prio, prio,
None None
) )

View File

@ -53,7 +53,12 @@ object PeriodicDueItemsTask {
def withChannel[F[_]: Sync](ctx: Context[F, Args], ops: ONotification[F])( def withChannel[F[_]: Sync](ctx: Context[F, Args], ops: ONotification[F])(
cont: Vector[NotificationChannel] => F[Unit] cont: Vector[NotificationChannel] => F[Unit]
): F[Unit] = ): F[Unit] =
TaskOperations.withChannel(ctx.logger, ctx.args.channels, ctx.args.account, ops)(cont) TaskOperations.withChannel(
ctx.logger,
ctx.args.channels,
ctx.args.account.userId,
ops
)(cont)
def withItems[F[_]: Sync]( def withItems[F[_]: Sync](
ctx: Context[F, Args], ctx: Context[F, Args],

View File

@ -54,7 +54,12 @@ object PeriodicQueryTask {
def withChannel[F[_]: Sync](ctx: Context[F, Args], ops: ONotification[F])( def withChannel[F[_]: Sync](ctx: Context[F, Args], ops: ONotification[F])(
cont: Vector[NotificationChannel] => F[Unit] cont: Vector[NotificationChannel] => F[Unit]
): F[Unit] = ): F[Unit] =
TaskOperations.withChannel(ctx.logger, ctx.args.channels, ctx.args.account, ops)(cont) TaskOperations.withChannel(
ctx.logger,
ctx.args.channels,
ctx.args.account.userId,
ops
)(cont)
private def queryString(q: ItemQuery.Expr) = private def queryString(q: ItemQuery.Expr) =
ItemQueryParser.asString(q) ItemQueryParser.asString(q)
@ -64,7 +69,10 @@ object PeriodicQueryTask {
): F[Unit] = { ): F[Unit] = {
def fromBookmark(id: String) = def fromBookmark(id: String) =
store store
.transact(RQueryBookmark.findByNameOrId(ctx.args.account, id)) .transact(
RQueryBookmark
.findByNameOrId(ctx.args.account.collectiveId, ctx.args.account.userId, id)
)
.map(_.map(_.query)) .map(_.map(_.query))
.flatTap(q => .flatTap(q =>
ctx.logger.debug(s"Loaded bookmark '$id': ${q.map(_.expr).map(queryString)}") ctx.logger.debug(s"Loaded bookmark '$id': ${q.map(_.expr).map(queryString)}")
@ -72,7 +80,9 @@ object PeriodicQueryTask {
def fromShare(id: String) = def fromShare(id: String) =
store store
.transact(RShare.findOneByCollective(ctx.args.account.collective, Some(true), id)) .transact(
RShare.findOneByCollective(ctx.args.account.collectiveId, Some(true), id)
)
.map(_.map(_.query)) .map(_.map(_.query))
.flatTap(q => .flatTap(q =>
ctx.logger.debug(s"Loaded share '$id': ${q.map(_.expr).map(queryString)}") ctx.logger.debug(s"Loaded share '$id': ${q.map(_.expr).map(queryString)}")

View File

@ -25,24 +25,24 @@ trait TaskOperations {
def withChannel[F[_]: Sync]( def withChannel[F[_]: Sync](
logger: Logger[F], logger: Logger[F],
channelsIn: NonEmptyList[ChannelRef], channelsIn: NonEmptyList[ChannelRef],
accountId: AccountId, userId: Ident,
ops: ONotification[F] ops: ONotification[F]
)( )(
cont: Vector[NotificationChannel] => F[Unit] cont: Vector[NotificationChannel] => F[Unit]
): F[Unit] = { ): F[Unit] = {
val channels = val channels =
channelsIn.toList.toVector.flatTraverse(ops.findNotificationChannel(_, accountId)) channelsIn.toList.toVector.flatTraverse(ops.findNotificationChannel(_, userId))
channels.flatMap { ch => channels.flatMap { ch =>
if (ch.isEmpty) if (ch.isEmpty)
logger.error(s"No channels found for the given data: ${channelsIn}") logger.error(s"No channels found for the given data: $channelsIn")
else cont(ch) else cont(ch)
} }
} }
def withEventContext[F[_]]( def withEventContext[F[_]](
logger: Logger[F], logger: Logger[F],
account: AccountId, account: AccountInfo,
baseUrl: Option[LenientUri], baseUrl: Option[LenientUri],
items: Vector[ListItem], items: Vector[ListItem],
contentStart: Option[String], contentStart: Option[String],

Some files were not shown because too many files have changed in this diff
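The `TaskOperations.withChannel` hunk above now resolves notification channels by the user id alone and aborts with an error log when nothing resolves. A condensed sketch of that control flow, with the docspell types replaced by minimal stand-ins and the channel lookup passed in as a plain function:

```scala
import cats.effect.IO
import cats.syntax.all._

object WithChannelSketch {
  // Minimal stand-ins; the real ChannelRef / NotificationChannel / ONotification
  // types are part of docspell and not reproduced here.
  final case class Ident(id: String)
  final case class ChannelRef(name: String)
  final case class NotificationChannel(endpoint: String)

  def withChannel(
      channelsIn: List[ChannelRef],
      userId: Ident,
      find: (ChannelRef, Ident) => IO[Vector[NotificationChannel]]
  )(cont: Vector[NotificationChannel] => IO[Unit]): IO[Unit] =
    channelsIn.toVector.flatTraverse(find(_, userId)).flatMap { ch =>
      if (ch.isEmpty)
        IO.println(s"No channels found for the given data: $channelsIn")
      else cont(ch)
    }
}
```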