Add a folder-id to item processing

This allows defining a folder when uploading files. All generated
items are associated with this folder on creation.
Eike Kettner 2020-07-14 21:25:44 +02:00
parent ec7f027b4e
commit 5b01c93711
14 changed files with 126 additions and 24 deletions
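
For illustration, a minimal caller-side sketch of the extended upload metadata. Only the new `folderId` field comes from this commit; the surrounding values and the `Ident.unsafe` helper are assumptions for the example.

```scala
import docspell.backend.ops.OUpload
import docspell.common._

object UploadMetaExample {
  // Sketch: an upload whose resulting items should be placed into a folder.
  // If the folder no longer exists when the processing job runs, the error is
  // only logged and the items are created without a folder.
  val meta = OUpload.UploadMeta(
    direction = Some(Direction.Incoming),
    sourceAbbrev = "webapp",
    folderId = Some(Ident.unsafe("my-folder-id")), // hypothetical folder id
    validFileTypes = Seq.empty
  )
}
```

When `folderId` is `None`, behaviour is unchanged; for uploads through a source, the source's own folder is used as a fallback (see the `orElse` in the `OUpload` hunk below).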

View File

@@ -58,6 +58,7 @@ object OUpload {
   case class UploadMeta(
       direction: Option[Direction],
       sourceAbbrev: String,
+      folderId: Option[Ident],
       validFileTypes: Seq[MimeType]
   )
@@ -123,6 +124,7 @@ object OUpload {
             lang.getOrElse(Language.German),
             data.meta.direction,
             data.meta.sourceAbbrev,
+            data.meta.folderId,
             data.meta.validFileTypes
           )
         args =
@@ -147,7 +149,10 @@ object OUpload {
     (for {
       src <- OptionT(store.transact(RSource.find(sourceId)))
       updata = data.copy(
-        meta = data.meta.copy(sourceAbbrev = src.abbrev),
+        meta = data.meta.copy(
+          sourceAbbrev = src.abbrev,
+          folderId = data.meta.folderId.orElse(src.folderId)
+        ),
         priority = src.priority
       )
       accId = AccountId(src.cid, src.sid)

View File

@@ -36,6 +36,7 @@ object ProcessItemArgs {
       language: Language,
       direction: Option[Direction],
       sourceAbbrev: String,
+      folderId: Option[Ident],
       validFileTypes: Seq[MimeType]
   )

View File

@@ -27,7 +27,9 @@ case class ScanMailboxArgs(
     // delete the after submitting (only if targetFolder is None)
     deleteMail: Boolean,
     // set the direction when submitting
-    direction: Option[Direction]
+    direction: Option[Direction],
+    // set a folder for items
+    itemFolder: Option[Ident]
 )

 object ScanMailboxArgs {

View File

@@ -84,6 +84,7 @@ object JoexAppImpl {
       joex <- OJoex(client, store)
       upload <- OUpload(store, queue, cfg.files, joex)
       fts <- createFtsClient(cfg)(httpClient)
+      itemOps <- OItem(store, fts)
       javaEmil =
         JavaMailEmil(blocker, Settings.defaultSettings.copy(debug = cfg.mailDebug))
       sch <- SchedulerBuilder(cfg.scheduler, blocker, store)
@@ -91,7 +92,7 @@ object JoexAppImpl {
         .withTask(
           JobTask.json(
             ProcessItemArgs.taskName,
-            ItemHandler.newItem[F](cfg, fts),
+            ItemHandler.newItem[F](cfg, itemOps, fts),
             ItemHandler.onCancel[F]
           )
         )

View File

@@ -5,6 +5,7 @@ import cats.effect._
 import cats.implicits._
 import fs2.Stream

+import docspell.backend.ops.OItem
 import docspell.common.{ItemState, ProcessItemArgs}
 import docspell.ftsclient.FtsClient
 import docspell.joex.Config
@@ -27,11 +28,12 @@ object ItemHandler {
   def newItem[F[_]: ConcurrentEffect: ContextShift](
       cfg: Config,
+      itemOps: OItem[F],
       fts: FtsClient[F]
   ): Task[F, Args, Unit] =
     CreateItem[F]
       .flatMap(itemStateTask(ItemState.Processing))
-      .flatMap(safeProcess[F](cfg, fts))
+      .flatMap(safeProcess[F](cfg, itemOps, fts))
       .map(_ => ())

   def itemStateTask[F[_]: Sync, A](
@@ -48,11 +50,12 @@ object ItemHandler {
   def safeProcess[F[_]: ConcurrentEffect: ContextShift](
       cfg: Config,
+      itemOps: OItem[F],
       fts: FtsClient[F]
   )(data: ItemData): Task[F, Args, ItemData] =
     isLastRetry[F].flatMap {
       case true =>
-        ProcessItem[F](cfg, fts)(data).attempt.flatMap({
+        ProcessItem[F](cfg, itemOps, fts)(data).attempt.flatMap({
           case Right(d) =>
             Task.pure(d)
           case Left(ex) =>
@@ -62,7 +65,7 @@ object ItemHandler {
               .andThen(_ => Sync[F].raiseError(ex))
         })
       case false =>
-        ProcessItem[F](cfg, fts)(data).flatMap(itemStateTask(ItemState.Created))
+        ProcessItem[F](cfg, itemOps, fts)(data).flatMap(itemStateTask(ItemState.Created))
     }

   private def markItemCreated[F[_]: Sync]: Task[F, Args, Boolean] =

View File

@@ -2,6 +2,7 @@ package docspell.joex.process

 import cats.effect._

+import docspell.backend.ops.OItem
 import docspell.common.ProcessItemArgs
 import docspell.ftsclient.FtsClient
 import docspell.joex.Config
@@ -11,6 +12,7 @@ object ProcessItem {
   def apply[F[_]: ConcurrentEffect: ContextShift](
       cfg: Config,
+      itemOps: OItem[F],
       fts: FtsClient[F]
   )(item: ItemData): Task[F, ProcessItemArgs, ItemData] =
     ExtractArchive(item)
@@ -22,6 +24,7 @@ object ProcessItem {
       .flatMap(analysisOnly[F](cfg))
       .flatMap(Task.setProgress(80))
       .flatMap(LinkProposal[F])
+      .flatMap(SetGivenData[F](itemOps))
       .flatMap(Task.setProgress(99))

   def analysisOnly[F[_]: Sync](

View File

@@ -0,0 +1,35 @@
+package docspell.joex.process
+
+import cats.effect._
+import cats.implicits._
+
+import docspell.backend.ops.OItem
+import docspell.common._
+import docspell.joex.scheduler.Task
+
+object SetGivenData {
+
+  def apply[F[_]: Sync](
+      ops: OItem[F]
+  )(data: ItemData): Task[F, ProcessItemArgs, ItemData] =
+    if (data.item.state.isValid)
+      Task
+        .log[F, ProcessItemArgs](_.debug(s"Not setting data on existing item"))
+        .map(_ => data)
+    else
+      Task { ctx =>
+        val itemId = data.item.id
+        val folderId = ctx.args.meta.folderId
+        val collective = ctx.args.meta.collective
+        for {
+          _ <- ctx.logger.info("Starting setting given data")
+          _ <- ctx.logger.debug(s"Set item folder: '${folderId.map(_.id)}'")
+          e <- ops.setFolder(itemId, folderId, collective).attempt
+          _ <- e.fold(
+                ex => ctx.logger.warn(s"Error setting folder: ${ex.getMessage}"),
+                _ => ().pure[F]
+              )
+        } yield data
+      }
+}

View File

@@ -143,7 +143,7 @@ object ScanMailboxTask {
         folder <- requireFolder(a)(name)
         search <- searchMails(a)(folder)
         headers <- Kleisli.liftF(filterMessageIds(search.mails))
-        _ <- headers.traverse(handleOne(a, upload))
+        _ <- headers.traverse(handleOne(ctx.args, a, upload))
       } yield ScanResult(name, search.mails.size, search.count - search.mails.size)

     def requireFolder[C](a: Access[F, C])(name: String): MailOp[F, C, MailFolder] =
@@ -239,7 +239,9 @@ object ScanMailboxTask {
           MailOp.pure(())
       }

-    def submitMail(upload: OUpload[F])(mail: Mail[F]): F[OUpload.UploadResult] = {
+    def submitMail(upload: OUpload[F], args: Args)(
+        mail: Mail[F]
+    ): F[OUpload.UploadResult] = {
       val file = OUpload.File(
         Some(mail.header.subject + ".eml"),
         Some(MimeType.emls.head),
@@ -251,6 +253,7 @@ object ScanMailboxTask {
         meta = OUpload.UploadMeta(
           Some(dir),
           s"mailbox-${ctx.args.account.user.id}",
+          args.itemFolder,
           Seq.empty
         )
         data = OUpload.UploadData(
@@ -264,14 +267,14 @@ object ScanMailboxTask {
       } yield res
     }

-    def handleOne[C](a: Access[F, C], upload: OUpload[F])(
+    def handleOne[C](args: Args, a: Access[F, C], upload: OUpload[F])(
         mh: MailHeader
     ): MailOp[F, C, Unit] =
       for {
         mail <- a.loadMail(mh)
         res <- mail match {
           case Some(m) =>
-            Kleisli.liftF(submitMail(upload)(m).attempt)
+            Kleisli.liftF(submitMail(upload, args)(m).attempt)
           case None =>
             MailOp.pure[F, C, Either[Throwable, OUpload.UploadResult]](
               Either.left(new Exception(s"Mail not found"))

View File

@@ -144,6 +144,7 @@ structure:
 ```
 { multiple: Bool
 , direction: Maybe String
+, folder: Maybe String
 }
 ```
@@ -156,6 +157,11 @@ Furthermore, the direction of the document (one of `incoming` or
 `outgoing`) can be given. It is optional, it can be left out or
 `null`.

+A `folder` id can be specified. Each item created by this request will
+be placed into this folder. Errors are logged (for example, the folder
+may have been deleted before the task is executed) and the item is
+then not put into any folder.
+
 This kind of request is very common and most programming languages
 have support for this. For example, here is another curl command
 uploading two files with meta data:

View File

@@ -2694,6 +2694,13 @@ components:
             The direction to apply to items resulting from importing
             mails. If not set, the value is guessed based on the from
             and to mail headers and your address book.
+        itemFolder:
+          type: string
+          format: ident
+          description: |
+            The folder id that is applied to items resulting from
+            importing mails. If the folder id is not valid when the
+            task executes, items have no folder set.
     ImapSettingsList:
       description: |
         A list of user email settings.
@@ -3437,9 +3444,15 @@ components:
         Meta information for an item upload. The user can specify some
         structured information with a binary file.

-        Additional metadata is not required. However, you have to
-        specifiy whether the corresponding files should become one
-        single item or if an item is created for each file.
+        Additional metadata is not required. However, if there is some
+        specified, you have to specifiy whether the corresponding
+        files should become one single item or if an item is created
+        for each file.
+
+        A direction can be given, `Incoming` is used if not specified.
+
+        A folderId can be given, the item is placed into this folder
+        after creation.
       required:
         - multiple
       properties:
@@ -3449,6 +3462,9 @@ components:
         direction:
           type: string
          format: direction
+        folder:
+          type: string
+          format: ident
     Collective:
       description: |
         Information about a collective.
@@ -3519,6 +3535,9 @@ components:
         priority:
           type: string
           format: priority
+        folder:
+          type: string
+          format: ident
         created:
           description: DateTime
           type: integer

View File

@@ -287,9 +287,11 @@ trait Conversions {
       .find(_.name.exists(_.equalsIgnoreCase("meta")))
       .map(p => parseMeta(p.body))
       .map(fm =>
-        fm.map(m => (m.multiple, UploadMeta(m.direction, "webapp", validFileTypes)))
+        fm.map(m =>
+          (m.multiple, UploadMeta(m.direction, "webapp", m.folder, validFileTypes))
+        )
       )
-      .getOrElse((true, UploadMeta(None, "webapp", validFileTypes)).pure[F])
+      .getOrElse((true, UploadMeta(None, "webapp", None, validFileTypes)).pure[F])

     val files = mp.parts
       .filter(p => p.name.forall(s => !s.equalsIgnoreCase("meta")))
@@ -491,12 +493,21 @@ trait Conversions {
   // sources
   def mkSource(s: RSource): Source =
-    Source(s.sid, s.abbrev, s.description, s.counter, s.enabled, s.priority, s.created)
+    Source(
+      s.sid,
+      s.abbrev,
+      s.description,
+      s.counter,
+      s.enabled,
+      s.priority,
+      s.folderId,
+      s.created
+    )

   def newSource[F[_]: Sync](s: Source, cid: Ident): F[RSource] =
     timeId.map({
       case (id, now) =>
-        RSource(id, cid, s.abbrev, s.description, 0, s.enabled, s.priority, now)
+        RSource(id, cid, s.abbrev, s.description, 0, s.enabled, s.priority, now, s.folder)
     })

   def changeSource[F[_]: Sync](s: Source, coll: Ident): RSource =
@@ -508,7 +519,8 @@ trait Conversions {
       s.counter,
       s.enabled,
       s.priority,
-      s.created
+      s.created,
+      s.folder
     )

   // equipment

View File

@@ -112,7 +112,8 @@ object ScanMailboxRoutes {
             settings.receivedSinceHours.map(_.toLong).map(Duration.hours),
             settings.targetFolder,
             settings.deleteMail,
-            settings.direction
+            settings.direction,
+            settings.itemFolder
           )
         )
       )
@@ -139,6 +140,7 @@ object ScanMailboxRoutes {
       task.args.receivedSince.map(_.hours.toInt),
       task.args.targetFolder,
       task.args.deleteMail,
-      task.args.direction
+      task.args.direction,
+      task.args.itemFolder
     )
   }

View File

@@ -50,6 +50,7 @@ object QFolder {
       def tryDelete =
         for {
           _ <- RItem.removeFolder(id)
+          _ <- RSource.removeFolder(id)
           _ <- RFolderMember.deleteAll(id)
           _ <- RFolder.delete(id)
         } yield FolderChangeResult.success

View File

@@ -15,7 +15,8 @@ case class RSource(
     counter: Int,
     enabled: Boolean,
     priority: Priority,
-    created: Timestamp
+    created: Timestamp,
+    folderId: Option[Ident]
 ) {}

 object RSource {
@@ -32,8 +33,10 @@ object RSource {
     val enabled = Column("enabled")
     val priority = Column("priority")
     val created = Column("created")
+    val folder = Column("folder_id")
-    val all = List(sid, cid, abbrev, description, counter, enabled, priority, created)
+    val all =
+      List(sid, cid, abbrev, description, counter, enabled, priority, created, folder)
   }

   import Columns._
@@ -42,7 +45,7 @@ object RSource {
     val sql = insertRow(
       table,
       all,
-      fr"${v.sid},${v.cid},${v.abbrev},${v.description},${v.counter},${v.enabled},${v.priority},${v.created}"
+      fr"${v.sid},${v.cid},${v.abbrev},${v.description},${v.counter},${v.enabled},${v.priority},${v.created},${v.folderId}"
     )
     sql.update.run
   }
@@ -56,7 +59,8 @@ object RSource {
         abbrev.setTo(v.abbrev),
         description.setTo(v.description),
         enabled.setTo(v.enabled),
-        priority.setTo(v.priority)
+        priority.setTo(v.priority),
+        folder.setTo(v.folderId)
       )
     )
     sql.update.run
@@ -97,4 +101,9 @@ object RSource {
   def delete(sourceId: Ident, coll: Ident): ConnectionIO[Int] =
     deleteFrom(table, and(sid.is(sourceId), cid.is(coll))).update.run
+
+  def removeFolder(folderId: Ident): ConnectionIO[Int] = {
+    val empty: Option[Ident] = None
+    updateRow(table, folder.is(folderId), folder.setTo(empty)).update.run
+  }
 }