commit 324476c1a9

Changelog.md
@@ -8,18 +8,21 @@
  periodically to import your mails.
- New feature "Integration Endpoint". Allows an admin to upload files
  to any collective using a separate endpoint.
- New feature: add files to existing items.
- The document list on the front-page has been rewritten. The table is
  removed and documents are now presented in a “card view”.
- Amend the mail-to-pdf conversion to include the e-mail date.
- When processing e-mails, set the item date automatically from the
  received-date in the mail.
- Fixes regarding character encodings when reading e-mails.
- Fix the `find-by-checksum` route that, given a sha256 checksum,
  returns whether there is such a file in docspell. It falsely
  returned `false` although documents existed.
- Amend the mail-to-pdf conversion to include the e-mail date.
- Fix webapp for mobile devices.
- The document list on the front-page has been rewritten. The table is
  removed and documents are now presented in a “card view”.
- Fix the search menu to remember dates in fields. When going back
  from an item detail to the front-page, the search menu remembers the
  last state, but dates were cleared.
- More fixes regarding character encodings when reading e-mails.
- Fix redirecting `/` to `/app`.
- Fix redirecting `/` only to `/app`.

### Configuration Changes

@@ -19,6 +19,15 @@ private[analysis] object Tld {
    ".edu",
    ".gov",
    ".mil",
    ".info",
    ".app",
    ".bar",
    ".biz",
    ".club",
    ".coop",
    ".icu",
    ".name",
    ".xyz",
    ".ad",
    ".ae",
    ".al",
@@ -1,15 +1,17 @@
package docspell.backend.ops

import bitpeace.MimetypeHint
import cats.implicits._
import cats.Functor
import cats.data.{EitherT, OptionT}
import cats.effect._
import cats.implicits._
import docspell.backend.Config
import fs2.Stream
import docspell.common._
import docspell.common.syntax.all._
import docspell.store.Store
import docspell.store.queue.JobQueue
import docspell.store.records.{RCollective, RJob, RSource}
import docspell.store.records._
import org.log4s._

trait OUpload[F[_]] {
@@ -17,14 +19,29 @@ trait OUpload[F[_]] {
  def submit(
      data: OUpload.UploadData[F],
      account: AccountId,
      notifyJoex: Boolean
      notifyJoex: Boolean,
      itemId: Option[Ident]
  ): F[OUpload.UploadResult]

  def submit(
      data: OUpload.UploadData[F],
      sourceId: Ident,
      notifyJoex: Boolean
      notifyJoex: Boolean,
      itemId: Option[Ident]
  ): F[OUpload.UploadResult]

  final def submitEither(
      data: OUpload.UploadData[F],
      accOrSrc: Either[Ident, AccountId],
      notifyJoex: Boolean,
      itemId: Option[Ident]
  ): F[OUpload.UploadResult] =
    accOrSrc match {
      case Right(acc) =>
        submit(data, acc, notifyJoex, itemId)
      case Left(srcId) =>
        submit(data, srcId, notifyJoex, itemId)
    }
}
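Both `submit` overloads now take an `itemId: Option[Ident]`, and `submitEither` only dispatches on whether the caller is identified by account or by source id. A hedged caller-side sketch — `backend`, `updata` and the id values are assumed, not part of this diff:

    // Attach the uploaded files to an existing item; None would create a new one.
    val res: F[OUpload.UploadResult] =
      backend.upload.submitEither(
        updata,                              // OUpload.UploadData[F]
        Right(AccountId(collective, user)),  // or Left(sourceId)
        true,                                // notifyJoex: wake the job executor
        Some(existingItemId)
      )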

object OUpload {
@@ -52,11 +69,32 @@ object OUpload {

  sealed trait UploadResult
  object UploadResult {
    case object Success extends UploadResult
    case object NoFiles extends UploadResult

    /** File(s) have been successfully submitted. */
    case object Success extends UploadResult

    def success: UploadResult = Success

    /** There were no files to submit. */
    case object NoFiles extends UploadResult

    def noFiles: UploadResult = NoFiles

    /** A source (`RSource') could not be found for a given source-id. */
    case object NoSource extends UploadResult

    def noSource: UploadResult = NoSource

    /** When adding files to an item, no item was found using the given
      * item-id. */
    case object NoItem extends UploadResult

    def noItem: UploadResult = NoItem
  }

  private def right[F[_]: Functor, A](a: F[A]): EitherT[F, UploadResult, A] =
    EitherT.right(a)

  def apply[F[_]: Sync](
      store: Store[F],
      queue: JobQueue[F],
@@ -68,14 +106,17 @@ object OUpload {
      def submit(
          data: OUpload.UploadData[F],
          account: AccountId,
          notifyJoex: Boolean
          notifyJoex: Boolean,
          itemId: Option[Ident]
      ): F[OUpload.UploadResult] =
        for {
          files <- data.files.traverse(saveFile).map(_.flatten)
          pred <- checkFileList(files)
          lang <- store.transact(RCollective.findLanguage(account.collective))
        (for {
          _ <- checkExistingItem(itemId, account.collective)
          files <- right(data.files.traverse(saveFile).map(_.flatten))
          _ <- checkFileList(files)
          lang <- right(store.transact(RCollective.findLanguage(account.collective)))
          meta = ProcessItemArgs.ProcessMeta(
            account.collective,
            itemId,
            lang.getOrElse(Language.German),
            data.meta.direction,
            data.meta.sourceAbbrev,
@@ -84,29 +125,31 @@ object OUpload {
          args =
            if (data.multiple) files.map(f => ProcessItemArgs(meta, List(f)))
            else Vector(ProcessItemArgs(meta, files.toList))
          job <- pred.traverse(_ => makeJobs(args, account, data.priority, data.tracker))
          _ <- logger.fdebug(s"Storing jobs: $job")
          res <- job.traverse(submitJobs(notifyJoex))
          _ <- store.transact(
            RSource.incrementCounter(data.meta.sourceAbbrev, account.collective)
          jobs <- right(makeJobs(args, account, data.priority, data.tracker))
          _ <- right(logger.fdebug(s"Storing jobs: $jobs"))
          res <- right(submitJobs(notifyJoex)(jobs))
          _ <- right(
            store.transact(
              RSource.incrementCounter(data.meta.sourceAbbrev, account.collective)
            )
          )
        } yield res.fold(identity, identity)
        } yield res).fold(identity, identity)

      def submit(
          data: OUpload.UploadData[F],
          sourceId: Ident,
          notifyJoex: Boolean
          notifyJoex: Boolean,
          itemId: Option[Ident]
      ): F[OUpload.UploadResult] =
        for {
          sOpt <-
            store
              .transact(RSource.find(sourceId))
              .map(_.toRight(UploadResult.NoSource))
          abbrev = sOpt.map(_.abbrev).toOption.getOrElse(data.meta.sourceAbbrev)
          updata = data.copy(meta = data.meta.copy(sourceAbbrev = abbrev))
          accId = sOpt.map(source => AccountId(source.cid, source.sid))
          result <- accId.traverse(acc => submit(updata, acc, notifyJoex))
        } yield result.fold(identity, identity)
        (for {
          src <- OptionT(store.transact(RSource.find(sourceId)))
          updata = data.copy(
            meta = data.meta.copy(sourceAbbrev = src.abbrev),
            priority = src.priority
          )
          accId = AccountId(src.cid, src.sid)
          result <- OptionT.liftF(submit(updata, accId, notifyJoex, itemId))
        } yield result).getOrElse(UploadResult.noSource)

      private def submitJobs(
          notifyJoex: Boolean
@@ -117,6 +160,7 @@ object OUpload {
          _ <- if (notifyJoex) joex.notifyAllNodes else ().pure[F]
        } yield UploadResult.Success

      /** Saves the file into the database. */
      private def saveFile(file: File[F]): F[Option[ProcessItemArgs.File]] =
        logger.finfo(s"Receiving file $file") *>
          store.bitpeace
@@ -135,10 +179,24 @@ object OUpload {
            )
          )

      private def checkExistingItem(
          itemId: Option[Ident],
          coll: Ident
      ): EitherT[F, UploadResult, Unit] =
        itemId match {
          case None =>
            right(().pure[F])
          case Some(id) =>
            OptionT(store.transact(RItem.findByIdAndCollective(id, coll)))
              .toRight(UploadResult.noItem)
              .map(_ => ())
        }

      private def checkFileList(
          files: Seq[ProcessItemArgs.File]
      ): F[Either[UploadResult, Unit]] =
        Sync[F].pure(if (files.isEmpty) Left(UploadResult.NoFiles) else Right(()))
      ): EitherT[F, UploadResult, Unit] =
        if (files.isEmpty) EitherT.left(UploadResult.noFiles.pure[F])
        else right(().pure[F])
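`checkExistingItem` and `checkFileList` both return `EitherT[F, UploadResult, Unit]`, so the `for` comprehension in `submit` short-circuits on the first failed validation and `.fold(identity, identity)` merges the error and success channels back into one `UploadResult`. A stripped-down sketch of the pattern — `checks` is hypothetical, assuming `cats.data.EitherT` and `cats.implicits._`:

    def runChecks[F[_]: Sync](
        checks: List[EitherT[F, UploadResult, Unit]]
    ): F[UploadResult] =
      (for {
        _   <- checks.sequence_                // the first Left aborts the chain
        res <- EitherT.right[UploadResult](UploadResult.success.pure[F])
      } yield res).fold(identity, identity)    // both channels carry UploadResult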

      private def makeJobs(
          args: Vector[ProcessItemArgs],
@@ -1,11 +1,18 @@
package docspell.common

import io.circe.{Decoder, Encoder}
import cats.data.NonEmptyList

sealed trait ItemState { self: Product =>

  final def name: String =
    productPrefix.toLowerCase

  def isValid: Boolean =
    ItemState.validStates.exists(_ == this)

  def isInvalid: Boolean =
    ItemState.invalidStates.exists(_ == this)
}

object ItemState {
@@ -24,8 +31,11 @@ object ItemState {
    case _ => Left(s"Invalid item state: $str")
  }

  val validStates: Seq[ItemState] =
    Seq(Created, Confirmed)
  val validStates: NonEmptyList[ItemState] =
    NonEmptyList.of(Created, Confirmed)

  val invalidStates: NonEmptyList[ItemState] =
    NonEmptyList.of(Premature, Processing)

  def unsafe(str: String): ItemState =
    fromString(str).fold(sys.error, identity)
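A quick REPL-style illustration of the new valid/invalid split, using only values defined in this file:

    ItemState.Created.isValid       // true:  Created is in validStates
    ItemState.Premature.isInvalid   // true:  Premature is in invalidStates
    ItemState.validStates.toList    // List(Created, Confirmed)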
@@ -4,6 +4,14 @@ import io.circe._, io.circe.generic.semiauto._
import docspell.common.syntax.all._
import ProcessItemArgs._

/** Arguments to the process-item task.
  *
  * This task is run for each new file to create a new item from it or
  * to add this file as an attachment to an existing item.
  *
  * If the `itemId' is set to some value, that item is loaded and
  * amended with the given files. Otherwise a new item is created.
  */
case class ProcessItemArgs(meta: ProcessMeta, files: List[File]) {

  def makeSubject: String =
@@ -22,6 +30,7 @@ object ProcessItemArgs {

  case class ProcessMeta(
      collective: Ident,
      itemId: Option[Ident],
      language: Language,
      direction: Option[Direction],
      sourceAbbrev: String,
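A hedged sketch of how the new field drives the create-vs-amend decision; `baseMeta`, `existingItemId` and `files` are assumed values, not part of this diff:

    val amendMeta  = baseMeta.copy(itemId = Some(existingItemId)) // attach to that item
    val createMeta = baseMeta.copy(itemId = None)                 // create a new item
    val args       = ProcessItemArgs(amendMeta, files)            // files: List[File]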
@@ -77,7 +77,7 @@ object JoexAppImpl {
      .withTask(
        JobTask.json(
          ProcessItemArgs.taskName,
          ItemHandler[F](cfg),
          ItemHandler.newItem[F](cfg),
          ItemHandler.onCancel[F]
        )
      )
@@ -71,7 +71,7 @@ object NotifyDueItemsTask {
      QItem.Query
        .empty(ctx.args.account.collective)
        .copy(
          states = ItemState.validStates,
          states = ItemState.validStates.toList,
          tagsInclude = ctx.args.tagsInclude,
          tagsExclude = ctx.args.tagsExclude,
          dueDateFrom = ctx.args.daysBack.map(back => now - Duration.days(back.toLong)),
@@ -62,7 +62,7 @@ object ConvertPdf {
    Conversion.create[F](cfg, sanitizeHtml, ctx.blocker, ctx.logger).use { conv =>
      mime match {
        case mt if mt.baseEqual(Mimetype.`application/pdf`) =>
          ctx.logger.info("Not going to convert a PDF file into a PDF.") *>
          ctx.logger.debug(s"Not going to convert a PDF file ${ra.name} into a PDF.") *>
            (ra, None: Option[RAttachmentMeta]).pure[F]

        case _ =>
@@ -32,44 +32,75 @@ object CreateItem {

      def fileMetas(itemId: Ident, now: Timestamp) =
        Stream
          .emits(ctx.args.files)
          .flatMap(f => ctx.store.bitpeace.get(f.fileMetaId.id).map(fm => (f, fm)))
          .collect({ case (f, Some(fm)) if isValidFile(fm) => f })
          .zipWithIndex
          .evalMap({
            case (f, index) =>
              Ident
                .randomId[F]
                .map(id =>
                  RAttachment(id, itemId, f.fileMetaId, index.toInt, now, f.name)
                )
          })
          .eval(ctx.store.transact(RAttachment.nextPosition(itemId)))
          .flatMap { offset =>
            Stream
              .emits(ctx.args.files)
              .flatMap(f => ctx.store.bitpeace.get(f.fileMetaId.id).map(fm => (f, fm)))
              .collect({ case (f, Some(fm)) if isValidFile(fm) => f })
              .zipWithIndex
              .evalMap({
                case (f, index) =>
                  Ident
                    .randomId[F]
                    .map(id =>
                      RAttachment(
                        id,
                        itemId,
                        f.fileMetaId,
                        index.toInt + offset,
                        now,
                        f.name
                      )
                    )
              })
          }
          .compile
          .toVector

      val item = RItem.newItem[F](
        ctx.args.meta.collective,
        ctx.args.makeSubject,
        ctx.args.meta.sourceAbbrev,
        ctx.args.meta.direction.getOrElse(Direction.Incoming),
        ItemState.Premature
      )
      val loadItemOrInsertNew =
        ctx.args.meta.itemId match {
          case Some(id) =>
            (for {
              _ <- OptionT.liftF(
                ctx.logger.info(
                  s"Loading item with id ${id.id} to amend"
                )
              )
              item <- OptionT(
                ctx.store
                  .transact(RItem.findByIdAndCollective(id, ctx.args.meta.collective))
              )
            } yield (1, item))
              .getOrElseF(Sync[F].raiseError(new Exception("Item not found.")))
          case None =>
            for {
              _ <- ctx.logger.info(
                s"Creating new item with ${ctx.args.files.size} attachment(s)"
              )
              item <- RItem.newItem[F](
                ctx.args.meta.collective,
                ctx.args.makeSubject,
                ctx.args.meta.sourceAbbrev,
                ctx.args.meta.direction.getOrElse(Direction.Incoming),
                ItemState.Premature
              )
              n <- ctx.store.transact(RItem.insert(item))
            } yield (n, item)
        }

      for {
        _ <- ctx.logger.info(
          s"Creating new item with ${ctx.args.files.size} attachment(s)"
        )
        time <- Duration.stopTime[F]
        it <- item
        n <- ctx.store.transact(RItem.insert(it))
        _ <- if (n != 1) storeItemError[F](ctx) else ().pure[F]
        fm <- fileMetas(it.id, it.created)
        it <- loadItemOrInsertNew
        _ <- if (it._1 != 1) storeItemError[F](ctx) else ().pure[F]
        now <- Timestamp.current[F]
        fm <- fileMetas(it._2.id, now)
        k <- fm.traverse(insertAttachment(ctx))
        _ <- logDifferences(ctx, fm, k.sum)
        dur <- time
        _ <- ctx.logger.info(s"Creating item finished in ${dur.formatExact}")
      } yield ItemData(
        it,
        it._2,
        fm,
        Vector.empty,
        Vector.empty,
@@ -86,10 +117,11 @@ object CreateItem {
      } yield n)
    }

  def findExisting[F[_]: Sync]: Task[F, ProcessItemArgs, Option[ItemData]] =
  private def findExisting[F[_]: Sync]: Task[F, ProcessItemArgs, Option[ItemData]] =
    Task { ctx =>
      val fileMetaIds = ctx.args.files.map(_.fileMetaId).toSet
      for {
        cand <- ctx.store.transact(QItem.findByFileIds(ctx.args.files.map(_.fileMetaId)))
        cand <- ctx.store.transact(QItem.findByFileIds(fileMetaIds.toSeq))
        _ <-
          if (cand.nonEmpty) ctx.logger.warn("Found existing item with these files.")
          else ().pure[F]
@@ -99,8 +131,11 @@ object CreateItem {
            ctx.logger.warn(s"Removed ${ht.sum} items with same attachments")
          else ().pure[F]
        rms <- OptionT(
          //load attachments but only those mentioned in the task's arguments
          cand.headOption.traverse(ri =>
            ctx.store.transact(RAttachment.findByItemAndCollective(ri.id, ri.cid))
            ctx.store
              .transact(RAttachment.findByItemAndCollective(ri.id, ri.cid))
              .map(_.filter(r => fileMetaIds.contains(r.fileId)))
          )
        ).getOrElse(Vector.empty)
        orig <- rms.traverse(a =>
@@ -13,6 +13,8 @@ import docspell.store.records._
import docspell.files.Zip
import cats.kernel.Monoid
import emil.Mail
import cats.kernel.Order
import cats.data.NonEmptyList

/** Goes through all attachments and extracts archive files, like zip
  * files. The process is recursive, until all archives have been
@@ -46,22 +48,37 @@ object ExtractArchive {
      archive: Option[RAttachmentArchive]
  ): Task[F, ProcessItemArgs, (Option[RAttachmentArchive], ItemData)] =
    Task { ctx =>
      def extract(ra: RAttachment) =
        findMime(ctx)(ra).flatMap(m => extractSafe(ctx, archive)(ra, m))
      def extract(ra: RAttachment, pos: Int): F[Extracted] =
        findMime(ctx)(ra).flatMap(m => extractSafe(ctx, archive)(ra, pos, m))

      for {
        ras <- item.attachments.traverse(extract)
        nra = ras.flatMap(_.files).zipWithIndex.map(t => t._1.copy(position = t._2))
        _ <- nra.traverse(storeAttachment(ctx))
        naa = ras.flatMap(_.archives)
        lastPos <- ctx.store.transact(RAttachment.nextPosition(item.item.id))
        extracts <-
          item.attachments.zipWithIndex
            .traverse(t => extract(t._1, lastPos + t._2))
            .map(Monoid[Extracted].combineAll)
            .map(fixPositions)
        nra = extracts.files
        _ <- extracts.files.traverse(storeAttachment(ctx))
        naa = extracts.archives
        _ <- naa.traverse(storeArchive(ctx))
      } yield naa.headOption -> item.copy(
        attachments = nra,
        originFile = item.originFile ++ nra.map(a => a.id -> a.fileId).toMap,
        givenMeta = item.givenMeta.fillEmptyFrom(Monoid[Extracted].combineAll(ras).meta)
        givenMeta = item.givenMeta.fillEmptyFrom(extracts.meta)
      )
    }

  /** After all files have been extracted, the `extract' contains the
    * whole (combined) result. This fixes positions of the attachments
    * such that the elements of an archive are "spliced" into the
    * attachment list at the position of the archive. If there is no
    * archive, positions don't need to be fixed.
    */
  private def fixPositions(extract: Extracted): Extracted =
    if (extract.archives.isEmpty) extract
    else extract.updatePositions

  def findMime[F[_]: Functor](ctx: Context[F, _])(ra: RAttachment): F[Mimetype] =
    OptionT(ctx.store.transact(RFileMeta.findById(ra.fileId)))
      .map(_.mimetype)
@@ -70,21 +87,21 @@ object ExtractArchive {
  def extractSafe[F[_]: ConcurrentEffect: ContextShift](
      ctx: Context[F, ProcessItemArgs],
      archive: Option[RAttachmentArchive]
  )(ra: RAttachment, mime: Mimetype): F[Extracted] =
  )(ra: RAttachment, pos: Int, mime: Mimetype): F[Extracted] =
    mime match {
      case Mimetype("application", "zip", _) if ra.name.exists(_.endsWith(".zip")) =>
        ctx.logger.info(s"Extracting zip archive ${ra.name.getOrElse("<noname>")}.") *>
          extractZip(ctx, archive)(ra)
          extractZip(ctx, archive)(ra, pos)
            .flatTap(_ => cleanupParents(ctx, ra, archive))

      case Mimetype("message", "rfc822", _) =>
        ctx.logger.info(s"Reading e-mail ${ra.name.getOrElse("<noname>")}") *>
          extractMail(ctx, archive)(ra)
          extractMail(ctx, archive)(ra, pos)
            .flatTap(_ => cleanupParents(ctx, ra, archive))

      case _ =>
        ctx.logger.debug(s"Not an archive: ${mime.asString}") *>
          Extracted.noArchive(ra).pure[F]
          Extracted.noArchive(ra, pos, 0).pure[F]
    }

  def cleanupParents[F[_]: Sync](
@@ -114,7 +131,7 @@ object ExtractArchive {
  def extractZip[F[_]: ConcurrentEffect: ContextShift](
      ctx: Context[F, _],
      archive: Option[RAttachmentArchive]
  )(ra: RAttachment): F[Extracted] = {
  )(ra: RAttachment, pos: Int): F[Extracted] = {
    val zipData = ctx.store.bitpeace
      .get(ra.fileId.id)
      .unNoneTerminate
@@ -122,7 +139,8 @@ object ExtractArchive {

    zipData
      .through(Zip.unzipP[F](8192, ctx.blocker))
      .flatMap(handleEntry(ctx, ra, archive, None))
      .zipWithIndex
      .flatMap(handleEntry(ctx, ra, pos, archive, None))
      .foldMonoid
      .compile
      .lastOrError
@@ -131,7 +149,7 @@ object ExtractArchive {
  def extractMail[F[_]: ConcurrentEffect: ContextShift](
      ctx: Context[F, _],
      archive: Option[RAttachmentArchive]
  )(ra: RAttachment): F[Extracted] = {
  )(ra: RAttachment, pos: Int): F[Extracted] = {
    val email: Stream[F, Byte] = ctx.store.bitpeace
      .get(ra.fileId.id)
      .unNoneTerminate
@@ -149,7 +167,8 @@ object ExtractArchive {

      ReadMail
        .mailToEntries(ctx.logger)(mail)
        .flatMap(handleEntry(ctx, ra, archive, mId)) ++ Stream.eval(givenMeta)
        .zipWithIndex
        .flatMap(handleEntry(ctx, ra, pos, archive, mId)) ++ Stream.eval(givenMeta)
    }
      .foldMonoid
      .compile
@@ -165,13 +184,15 @@ object ExtractArchive {
  def handleEntry[F[_]: Sync](
      ctx: Context[F, _],
      ra: RAttachment,
      pos: Int,
      archive: Option[RAttachmentArchive],
      messageId: Option[String]
  )(
      entry: Binary[F]
      tentry: (Binary[F], Long)
  ): Stream[F, Extracted] = {
    val mimeHint = MimetypeHint.filename(entry.name).withAdvertised(entry.mime.asString)
    val fileMeta = ctx.store.bitpeace.saveNew(entry.data, 8192, mimeHint)
    val (entry, subPos) = tentry
    val mimeHint = MimetypeHint.filename(entry.name).withAdvertised(entry.mime.asString)
    val fileMeta = ctx.store.bitpeace.saveNew(entry.data, 8192, mimeHint)
    Stream.eval(ctx.logger.debug(s"Extracted ${entry.name}. Storing as attachment.")) >>
      fileMeta.evalMap { fm =>
        Ident.randomId.map { id =>
@@ -179,12 +200,12 @@ object ExtractArchive {
            id,
            ra.itemId,
            Ident.unsafe(fm.id),
            0, //position is updated afterwards
            pos,
            ra.created,
            Option(entry.name).map(_.trim).filter(_.nonEmpty)
          )
          val aa = archive.getOrElse(RAttachmentArchive.of(ra, messageId)).copy(id = id)
          Extracted.of(nra, aa)
          Extracted.of(nra, aa, pos, subPos.toInt)
        }
      }

@@ -204,28 +225,67 @@ object ExtractArchive {
  case class Extracted(
      files: Vector[RAttachment],
      archives: Vector[RAttachmentArchive],
      meta: MetaProposalList
      meta: MetaProposalList,
      positions: List[Extracted.Pos]
  ) {
    def ++(e: Extracted) =
      Extracted(files ++ e.files, archives ++ e.archives, meta.fillEmptyFrom(e.meta))
      Extracted(
        files ++ e.files,
        archives ++ e.archives,
        meta.fillEmptyFrom(e.meta),
        positions ++ e.positions
      )

    def setMeta(m: MetaProposal): Extracted =
      setMeta(MetaProposalList.of(m))

    def setMeta(ml: MetaProposalList): Extracted =
      Extracted(files, archives, meta.fillEmptyFrom(ml))
      Extracted(files, archives, meta.fillEmptyFrom(ml), positions)

    def updatePositions: Extracted =
      NonEmptyList.fromList(positions) match {
        case None =>
          this
        case Some(nel) =>
          val sorted = nel.sorted
          val offset = sorted.head.first
          val pos =
            sorted.zipWithIndex.map({ case (p, i) => p.id -> (i + offset) }).toList.toMap
          val nf =
            files.map(f => pos.get(f.id).map(n => f.copy(position = n)).getOrElse(f))
          copy(files = nf)
      }
  }
  object Extracted {
    val empty = Extracted(Vector.empty, Vector.empty, MetaProposalList.empty)
    val empty =
      Extracted(Vector.empty, Vector.empty, MetaProposalList.empty, Nil)

    def noArchive(ra: RAttachment): Extracted =
      Extracted(Vector(ra), Vector.empty, MetaProposalList.empty)
    def noArchive(ra: RAttachment, pos: Int, subPos: Int): Extracted =
      Extracted(
        Vector(ra),
        Vector.empty,
        MetaProposalList.empty,
        List(Pos(ra.id, pos, subPos))
      )

    def of(ra: RAttachment, aa: RAttachmentArchive): Extracted =
      Extracted(Vector(ra), Vector(aa), MetaProposalList.empty)
    def of(ra: RAttachment, aa: RAttachmentArchive, pos: Int, subPos: Int): Extracted =
      Extracted(
        Vector(ra),
        Vector(aa),
        MetaProposalList.empty,
        List(Pos(ra.id, pos, subPos))
      )

    implicit val extractedMonoid: Monoid[Extracted] =
      Monoid.instance(empty, _ ++ _)

    case class Pos(id: Ident, first: Int, second: Int)

    object Pos {
      implicit val ordering: Order[Pos] =
        Order.whenEqual(Order.by(_.first), Order.by(_.second))
    }
  }

}
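`Pos` orders first by the archive attachment's position, then by the entry's index inside the archive, so `updatePositions` splices extracted entries into the attachment list where their archive sat. A small hedged illustration — the `Ident` values a, z1, z2 and b are made up:

    // a at position 0, archive entries z1/z2 both at the archive's position 1, b at 2:
    val ps = List(Pos(b, 2, 0), Pos(z2, 1, 1), Pos(a, 0, 0), Pos(z1, 1, 0))
    ps.sorted(Order[Pos].toOrdering).map(_.id)
    // -> a, z1, z2, b; re-indexing then assigns positions 0, 1, 2, 3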
@@ -2,19 +2,20 @@ package docspell.joex.process

import cats.implicits._
import cats.effect._
import fs2.Stream
import docspell.common.{ItemState, ProcessItemArgs}
import docspell.joex.Config
import docspell.joex.scheduler.{Context, Task}
import docspell.joex.scheduler.Task
import docspell.store.queries.QItem
import docspell.store.records.{RItem, RJob}
import docspell.store.records.RItem

object ItemHandler {
  def onCancel[F[_]: Sync: ContextShift]: Task[F, ProcessItemArgs, Unit] =
    logWarn("Now cancelling. Deleting potentially created data.").flatMap(_ =>
      deleteByFileIds
      deleteByFileIds.flatMap(_ => deleteFiles)
    )

  def apply[F[_]: ConcurrentEffect: ContextShift](
  def newItem[F[_]: ConcurrentEffect: ContextShift](
      cfg: Config
  ): Task[F, ProcessItemArgs, Unit] =
    CreateItem[F]
@@ -25,18 +26,19 @@ object ItemHandler {
  def itemStateTask[F[_]: Sync, A](
      state: ItemState
  )(data: ItemData): Task[F, A, ItemData] =
    Task(ctx => ctx.store.transact(RItem.updateState(data.item.id, state)).map(_ => data))
    Task(ctx =>
      ctx.store
        .transact(RItem.updateState(data.item.id, state, ItemState.invalidStates))
        .map(_ => data)
    )

  def isLastRetry[F[_]: Sync, A](ctx: Context[F, A]): F[Boolean] =
    for {
      current <- ctx.store.transact(RJob.getRetries(ctx.jobId))
      last = ctx.config.retries == current.getOrElse(0)
    } yield last
  def isLastRetry[F[_]: Sync]: Task[F, ProcessItemArgs, Boolean] =
    Task(_.isLastRetry)

  def safeProcess[F[_]: ConcurrentEffect: ContextShift](
      cfg: Config
  )(data: ItemData): Task[F, ProcessItemArgs, ItemData] =
    Task(isLastRetry[F, ProcessItemArgs] _).flatMap {
    isLastRetry[F].flatMap {
      case true =>
        ProcessItem[F](cfg)(data).attempt.flatMap({
          case Right(d) =>
@@ -60,6 +62,15 @@ object ItemHandler {
    } yield ()
  }

  private def deleteFiles[F[_]: Sync]: Task[F, ProcessItemArgs, Unit] =
    Task(ctx =>
      Stream
        .emits(ctx.args.files.map(_.fileMetaId.id))
        .flatMap(id => ctx.store.bitpeace.delete(id).attempt.drain)
        .compile
        .drain
    )

  private def logWarn[F[_]](msg: => String): Task[F, ProcessItemArgs, Unit] =
    Task(_.logger.warn(msg))
}
@@ -9,21 +9,26 @@ import docspell.store.records.RItem
object LinkProposal {

  def apply[F[_]: Sync](data: ItemData): Task[F, ProcessItemArgs, ItemData] =
    Task { ctx =>
      // sort by weight; order of equal weights is not important, just
      // choose one; others are then suggestions
      // doc-date is only set when given explicitly, not from "guessing"
      val proposals = MetaProposalList
        .flatten(data.metas.map(_.proposals))
        .filter(_.proposalType != MetaProposalType.DocDate)
        .sortByWeights
    if (data.item.state.isValid)
      Task
        .log[F, ProcessItemArgs](_.debug(s"Not linking proposals on existing item"))
        .map(_ => data)
    else
      Task { ctx =>
        // sort by weight; order of equal weights is not important, just
        // choose one; others are then suggestions
        // doc-date is only set when given explicitly, not from "guessing"
        val proposals = MetaProposalList
          .flatten(data.metas.map(_.proposals))
          .filter(_.proposalType != MetaProposalType.DocDate)
          .sortByWeights

        ctx.logger.info(s"Starting linking proposals") *>
          MetaProposalType.all
            .traverse(applyValue(data, proposals, ctx))
            .map(result => ctx.logger.info(s"Results from proposal processing: $result"))
            .map(_ => data)
      }
      ctx.logger.info(s"Starting linking proposals") *>
        MetaProposalType.all
          .traverse(applyValue(data, proposals, ctx))
          .map(result => ctx.logger.info(s"Results from proposal processing: $result"))
          .map(_ => data)
    }

  def applyValue[F[_]: Sync](
      data: ItemData,
@@ -40,8 +45,9 @@ object LinkProposal {
          Result.single(mpt)
        )
      case Some(a) =>
        val ids = a.values.map(_.ref.id.id)
        ctx.logger.info(
          s"Found many (${a.size}, ${a.values.map(_.ref.id.id)}) candidates for ${a.proposalType}. Setting first."
          s"Found many (${a.size}, ${ids}) candidates for ${a.proposalType}. Setting first."
        ) *>
          setItemMeta(data.item.id, ctx, a.proposalType, a.values.head.ref.id).map(_ =>
            Result.multiple(mpt)
@@ -12,11 +12,13 @@ object ProcessItem {
      cfg: Config
  )(item: ItemData): Task[F, ProcessItemArgs, ItemData] =
    ExtractArchive(item)
      .flatMap(Task.setProgress(20))
      .flatMap(ConvertPdf(cfg.convert, _))
      .flatMap(Task.setProgress(40))
      .flatMap(TextExtraction(cfg.extraction, _))
      .flatMap(Task.setProgress(50))
      .flatMap(Task.setProgress(60))
      .flatMap(analysisOnly[F](cfg.textAnalysis))
      .flatMap(Task.setProgress(75))
      .flatMap(Task.setProgress(80))
      .flatMap(LinkProposal[F])
      .flatMap(Task.setProgress(99))
@@ -60,7 +60,7 @@ object TextExtraction {
        rm => rm.setContentIfEmpty(txt.map(_.trim).filter(_.nonEmpty))
      )
      est <- dst
      _ <- ctx.logger.debug(
      _ <- ctx.logger.info(
        s"Extracting text for attachment ${stripAttachmentName(ra)} finished in ${est.formatExact}"
      )
    } yield meta
@@ -259,7 +259,7 @@ object ScanMailboxTask {
        priority = Priority.Low,
        tracker = None
      )
      res <- upload.submit(data, ctx.args.account, false)
      res <- upload.submit(data, ctx.args.account, false, None)
    } yield res
  }
@@ -1,7 +1,7 @@
package docspell.joex.scheduler

import cats.Functor
import cats.effect.{Blocker, Concurrent}
import cats.{Applicative, Functor}
import cats.effect._
import cats.implicits._
import docspell.common._
import docspell.store.Store
@@ -23,6 +23,12 @@ trait Context[F[_], A] { self =>

  def store: Store[F]

  final def isLastRetry(implicit ev: Applicative[F]): F[Boolean] =
    for {
      current <- store.transact(RJob.getRetries(jobId))
      last = config.retries == current.getOrElse(0)
    } yield last

  def blocker: Blocker

  def map[C](f: A => C)(implicit F: Functor[F]): Context[F, C] =
@@ -87,12 +87,6 @@ paths:
        The upload metadata can be used to tell whether multiple
        files are one item, or if each file should become a single
        item. By default, each file will become one item.

        Only certain file types are supported:

        * application/pdf

        Support for more types might be added.
      parameters:
        - $ref: "#/components/parameters/id"
      requestBody:
@@ -115,6 +109,48 @@ paths:
            application/json:
              schema:
                $ref: "#/components/schemas/BasicResult"
  /open/upload/item/{itemId}/{id}:
    post:
      tags: [ Upload ]
      summary: Upload files to docspell.
      description: |
        Upload a file to docspell for processing. The id is a *source
        id* configured by a collective. Files are submitted for
        processing which eventually results in an item in the inbox of
        the corresponding collective. This endpoint associates the
        files to an existing item identified by its `itemId`.

        The request must be a `multipart/form-data` request, where the
        first part has name `meta`, is optional and may contain upload
        metadata as JSON. Check out the structure `ItemUploadMeta` at
        the end if it is not shown here. Other parts specify the
        files. Multiple files can be specified, but at least one is
        required.

        Upload metadata is ignored.
      parameters:
        - $ref: "#/components/parameters/id"
        - $ref: "#/components/parameters/itemId"
      requestBody:
        content:
          multipart/form-data:
            schema:
              type: object
              properties:
                meta:
                  $ref: "#/components/schemas/ItemUploadMeta"
                file:
                  type: array
                  items:
                    type: string
                    format: binary
      responses:
        200:
          description: Ok
          content:
            application/json:
              schema:
                $ref: "#/components/schemas/BasicResult"
  /sec/checkfile/{checksum}:
    get:
      tags: [ Upload ]
@@ -155,12 +191,6 @@ paths:
        The upload metadata can be used to tell whether multiple
        files are one item, or if each file should become a single
        item. By default, each file will become one item.

        Only certain file types are supported:

        * application/pdf

        Support for more types might be added.
      security:
        - authTokenHeader: []
      requestBody:
@@ -183,6 +213,50 @@ paths:
            application/json:
              schema:
                $ref: "#/components/schemas/BasicResult"
  /sec/upload/{itemId}:
    post:
      tags: [ Upload ]
      summary: Upload files to docspell.
      description: |
        Upload files to docspell for processing. This route is meant
        for authenticated users that upload files to their account.
        This endpoint will associate the files to an existing item
        identified by its `itemId`.

        Everything else is the same as with the
        `/open/upload/item/{itemId}/{id}` endpoint.

        The request must be a "multipart/form-data" request, where the
        first part is optional and may contain upload metadata as
        JSON. Other parts specify the files. Multiple files can be
        specified, but at least one is required.

        The upload metadata is ignored, since the item already
        exists.
      security:
        - authTokenHeader: []
      parameters:
        - $ref: "#/components/parameters/itemId"
      requestBody:
        content:
          multipart/form-data:
            schema:
              type: object
              properties:
                meta:
                  $ref: "#/components/schemas/ItemUploadMeta"
                file:
                  type: array
                  items:
                    type: string
                    format: binary
      responses:
        200:
          description: Ok
          content:
            application/json:
              schema:
                $ref: "#/components/schemas/BasicResult"
  /open/signup/register:
    post:
      tags: [ Registration ]
@@ -3156,6 +3230,13 @@ components:
      required: true
      schema:
        type: string
    itemId:
      name: itemId
      in: path
      description: An identifier for an item
      required: true
      schema:
        type: string
    full:
      name: full
      in: query
@@ -109,7 +109,7 @@ trait Conversions {
      coll,
      m.name,
      if (m.inbox) Seq(ItemState.Created)
      else ItemState.validStates,
      else ItemState.validStates.toList,
      m.direction,
      m.corrPerson,
      m.corrOrg,
@@ -470,6 +470,7 @@ trait Conversions {
      case UploadResult.Success => BasicResult(true, "Files submitted.")
      case UploadResult.NoFiles => BasicResult(false, "There were no files to submit.")
      case UploadResult.NoSource => BasicResult(false, "The source id is not valid.")
      case UploadResult.NoItem => BasicResult(false, "The item could not be found.")
    }

  def basicResult(cr: PassChangeResult): BasicResult =
@@ -80,7 +80,7 @@ object IntegrationEndpointRoutes {
            cfg.backend.files.validMimeTypes
          )
          account = AccountId(coll, Ident.unsafe("docspell-system"))
          result <- backend.upload.submit(updata, account, true)
          result <- backend.upload.submit(updata, account, true, None)
          res <- Ok(basicResult(result))
        } yield res
      }
@@ -2,13 +2,13 @@ package docspell.restserver.routes

import cats.effect._
import cats.implicits._
import docspell.common._
import docspell.backend.BackendApp
import docspell.backend.auth.AuthToken
import docspell.common.{Ident, Priority}
import docspell.restserver.Config
import docspell.restserver.conv.Conversions._
import docspell.restserver.http4s.ResponseGenerator
import org.http4s.HttpRoutes
import org.http4s._
import org.http4s.circe.CirceEntityEncoder._
import org.http4s.EntityDecoder._
import org.http4s.dsl.Http4sDsl
@@ -26,19 +26,14 @@ object UploadRoutes {
    val dsl = new Http4sDsl[F] with ResponseGenerator[F] {}
    import dsl._

    val submitting = submitFiles[F](backend, cfg, Right(user.account)) _

    HttpRoutes.of {
      case req @ POST -> Root / "item" =>
        for {
          multipart <- req.as[Multipart[F]]
          updata <- readMultipart(
            multipart,
            logger,
            Priority.High,
            cfg.backend.files.validMimeTypes
          )
          result <- backend.upload.submit(updata, user.account, true)
          res <- Ok(basicResult(result))
        } yield res
        submitting(req, None, Priority.High, dsl)

      case req @ POST -> Root / "item" / Ident(itemId) =>
        submitting(req, Some(itemId), Priority.High, dsl)
    }
  }

@@ -48,17 +43,35 @@ object UploadRoutes {

    HttpRoutes.of {
      case req @ POST -> Root / "item" / Ident(id) =>
        for {
          multipart <- req.as[Multipart[F]]
          updata <- readMultipart(
            multipart,
            logger,
            Priority.Low,
            cfg.backend.files.validMimeTypes
          )
          result <- backend.upload.submit(updata, id, true)
          res <- Ok(basicResult(result))
        } yield res
        submitFiles(backend, cfg, Left(id))(req, None, Priority.Low, dsl)

      case req @ POST -> Root / "item" / Ident(itemId) / Ident(id) =>
        submitFiles(backend, cfg, Left(id))(req, Some(itemId), Priority.Low, dsl)
    }
  }

  private def submitFiles[F[_]: Effect](
      backend: BackendApp[F],
      cfg: Config,
      accOrSrc: Either[Ident, AccountId]
  )(
      req: Request[F],
      itemId: Option[Ident],
      prio: Priority,
      dsl: Http4sDsl[F]
  ): F[Response[F]] = {
    import dsl._

    for {
      multipart <- req.as[Multipart[F]]
      updata <- readMultipart(
        multipart,
        logger,
        prio,
        cfg.backend.files.validMimeTypes
      )
      result <- backend.upload.submitEither(updata, accOrSrc, true, itemId)
      res <- Ok(basicResult(result))
    } yield res
  }
}
@@ -101,4 +101,6 @@ case class Column(name: String, ns: String = "", alias: String = "") {
  def asc: Fragment =
    f ++ fr"asc"

  def max: Fragment =
    fr"MAX(" ++ f ++ fr")"
}
@@ -287,7 +287,7 @@ object QItem {
      n <- store.transact(RItem.deleteByIdAndCollective(itemId, collective))
    } yield tn + rn + n

  def findByFileIds(fileMetaIds: List[Ident]): ConnectionIO[Vector[RItem]] = {
  def findByFileIds(fileMetaIds: Seq[Ident]): ConnectionIO[Vector[RItem]] = {
    val IC = RItem.Columns
    val AC = RAttachment.Columns
    val q =
@@ -38,6 +38,11 @@ object RAttachment {
      fr"${v.id},${v.itemId},${v.fileId.id},${v.position},${v.created},${v.name}"
    ).update.run

  def nextPosition(id: Ident): ConnectionIO[Int] =
    for {
      max <- selectSimple(position.max, table, itemId.is(id)).query[Option[Int]].unique
    } yield max.map(_ + 1).getOrElse(0)
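`position.max` builds on the new `Column.max` fragment, so the generated SQL is roughly `SELECT MAX(position) FROM attachment WHERE itemid = ?` (table and column names assumed here). Both CreateItem and ExtractArchive call it before appending attachments, e.g.:

    // Hedged usage sketch: 0 for an item without attachments, else max + 1.
    val next: ConnectionIO[Int] = RAttachment.nextPosition(someItemId)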

  def updateFileIdAndName(
      attachId: Ident,
      fId: Ident,
@@ -1,7 +1,8 @@
package docspell.store.records

import cats.implicits._
import cats.data.NonEmptyList
import cats.effect.Sync
import cats.implicits._
import doobie._
import doobie.implicits._
import docspell.common._
@@ -110,12 +111,16 @@ object RItem {
  def getCollective(itemId: Ident): ConnectionIO[Option[Ident]] =
    selectSimple(List(cid), table, id.is(itemId)).query[Ident].option

  def updateState(itemId: Ident, itemState: ItemState): ConnectionIO[Int] =
  def updateState(
      itemId: Ident,
      itemState: ItemState,
      existing: NonEmptyList[ItemState]
  ): ConnectionIO[Int] =
    for {
      t <- currentTime
      n <- updateRow(
        table,
        id.is(itemId),
        and(id.is(itemId), state.isIn(existing)),
        commas(state.setTo(itemState), updated.setTo(t))
      ).update.run
    } yield n
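The added `existing` list turns the update into a guarded state transition: the row is only changed while its current state is still in the given set. A hedged call-site sketch (`someItemId` is assumed):

    // Move the item to Created only if it is still Premature or Processing;
    // an already confirmed item stays untouched (0 rows updated).
    val n: ConnectionIO[Int] =
      RItem.updateState(someItemId, ItemState.Created, ItemState.invalidStates)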
@@ -285,4 +290,7 @@ object RItem {

  def existsById(itemId: Ident): ConnectionIO[Boolean] =
    selectCount(id, table, id.is(itemId)).query[Int].unique.map(_ > 0)

  def findByIdAndCollective(itemId: Ident, coll: Ident): ConnectionIO[Option[RItem]] =
    selectSimple(all, table, and(id.is(itemId), cid.is(coll))).query[RItem].option
}
@@ -71,6 +71,7 @@ module Api exposing
    , submitNotifyDueItems
    , updateScanMailbox
    , upload
    , uploadAmend
    , uploadSingle
    , versionInfo
    )
@@ -429,7 +430,42 @@ createImapSettings flags mname ems receive =
--- Upload


upload : Flags -> Maybe String -> ItemUploadMeta -> List File -> (String -> Result Http.Error BasicResult -> msg) -> List (Cmd msg)
uploadAmend :
    Flags
    -> String
    -> List File
    -> (String -> Result Http.Error BasicResult -> msg)
    -> List (Cmd msg)
uploadAmend flags itemId files receive =
    let
        mkReq file =
            let
                fid =
                    Util.File.makeFileId file

                path =
                    "/api/v1/sec/upload/item/" ++ itemId
            in
            Http2.authPostTrack
                { url = flags.config.baseUrl ++ path
                , account = getAccount flags
                , body =
                    Http.multipartBody <|
                        [ Http.filePart "file[]" file ]
                , expect = Http.expectJson (receive fid) Api.Model.BasicResult.decoder
                , tracker = fid
                }
    in
    List.map mkReq files


upload :
    Flags
    -> Maybe String
    -> ItemUploadMeta
    -> List File
    -> (String -> Result Http.Error BasicResult -> msg)
    -> List (Cmd msg)
upload flags sourceId meta files receive =
    let
        metaStr =
@@ -457,7 +493,14 @@ upload flags sourceId meta files receive =
    List.map mkReq files


uploadSingle : Flags -> Maybe String -> ItemUploadMeta -> String -> List File -> (Result Http.Error BasicResult -> msg) -> Cmd msg
uploadSingle :
    Flags
    -> Maybe String
    -> ItemUploadMeta
    -> String
    -> List File
    -> (Result Http.Error BasicResult -> msg)
    -> Cmd msg
uploadSingle flags sourceId meta track files receive =
    let
        metaStr =
@@ -74,7 +74,7 @@ updateWithSub msg model =
            updateNewInvite m model |> noSub

        ItemDetailMsg m ->
            updateItemDetail m model |> noSub
            updateItemDetail m model

        VersionResp (Ok info) ->
            ( { model | version = info }, Cmd.none ) |> noSub
@@ -172,17 +172,20 @@ updateWithSub msg model =
            ( { model | navMenuOpen = not model.navMenuOpen }, Cmd.none, Sub.none )


updateItemDetail : Page.ItemDetail.Data.Msg -> Model -> ( Model, Cmd Msg )
updateItemDetail : Page.ItemDetail.Data.Msg -> Model -> ( Model, Cmd Msg, Sub Msg )
updateItemDetail lmsg model =
    let
        inav =
            Page.Home.Data.itemNav model.itemDetailModel.detail.item.id model.homeModel

        ( lm, lc ) =
        ( lm, lc, ls ) =
            Page.ItemDetail.Update.update model.key model.flags inav.next lmsg model.itemDetailModel
    in
    ( { model | itemDetailModel = lm }
    ( { model
        | itemDetailModel = lm
      }
    , Cmd.map ItemDetailMsg lc
    , Sub.map ItemDetailMsg ls
    )


@@ -341,7 +344,19 @@ initPage model page =
            updateQueue Page.Queue.Data.StopRefresh model

        ItemDetailPage id ->
            updateItemDetail (Page.ItemDetail.Data.Init id) model
            let
                updateDetail m__ =
                    let
                        ( m, c, s ) =
                            updateItemDetail (Page.ItemDetail.Data.Init id) m__
                    in
                    ( { m | subs = Sub.batch [ m.subs, s ] }, c )
            in
            Util.Update.andThen1
                [ updateDetail
                , updateQueue Page.Queue.Data.StopRefresh
                ]
                model


noSub : ( Model, Cmd Msg ) -> ( Model, Cmd Msg, Sub Msg )
@@ -136,6 +136,12 @@ view model =
            [ i [ class "folder open icon" ] []
            , text "Select ..."
            ]
        , div [ class "ui center aligned text container" ]
            [ span [ class "small-info" ]
                [ text "Choose document files (pdf, docx, txt, html, …). "
                , text "Archives (zip and eml) are extracted."
                ]
            ]
        ]
(File diff suppressed because it is too large.)
@@ -1,5 +1,7 @@
module Data.Icons exposing
    ( concerned
    ( addFiles
    , addFilesIcon
    , concerned
    , concernedIcon
    , correspondent
    , correspondentIcon
@@ -51,3 +53,13 @@
editNotesIcon : Html msg
editNotesIcon =
    i [ class editNotes ] []


addFiles : String
addFiles =
    "file plus icon"


addFilesIcon : Html msg
addFilesIcon =
    i [ class addFiles ] []
@@ -58,7 +58,9 @@ init flags url key =
            Nothing ->
                Cmd.none
    in
    ( m, Cmd.batch [ cmd, Api.versionInfo flags VersionResp, sessionCheck ] )
    ( m
    , Cmd.batch [ cmd, Api.versionInfo flags VersionResp, sessionCheck ]
    )


viewDoc : Model -> Document Msg
@@ -7,25 +7,27 @@ import Data.Flags exposing (Flags)
import Page.ItemDetail.Data exposing (Model, Msg(..))


update : Nav.Key -> Flags -> Maybe String -> Msg -> Model -> ( Model, Cmd Msg )
update : Nav.Key -> Flags -> Maybe String -> Msg -> Model -> ( Model, Cmd Msg, Sub Msg )
update key flags next msg model =
    case msg of
        Init id ->
            let
                ( lm, lc ) =
                ( lm, lc, ls ) =
                    Comp.ItemDetail.update key flags next Comp.ItemDetail.Init model.detail
            in
            ( { model | detail = lm }
            , Cmd.batch [ Api.itemDetail flags id ItemResp, Cmd.map ItemDetailMsg lc ]
            , Sub.map ItemDetailMsg ls
            )

        ItemDetailMsg lmsg ->
            let
                ( lm, lc ) =
                ( lm, lc, ls ) =
                    Comp.ItemDetail.update key flags next lmsg model.detail
            in
            ( { model | detail = lm }
            , Cmd.map ItemDetailMsg lc
            , Sub.map ItemDetailMsg ls
            )

        ItemResp (Ok item) ->
@@ -36,4 +38,4 @@ update key flags next msg model =
            update key flags next (ItemDetailMsg lmsg) model

        ItemResp (Err _) ->
            ( model, Cmd.none )
            ( model, Cmd.none, Sub.none )
@@ -41,7 +41,12 @@ update sourceId flags msg model =

            uploads =
                if model.singleItem then
                    Api.uploadSingle flags sourceId meta uploadAllTracker model.files (SingleUploadResp uploadAllTracker)
                    Api.uploadSingle flags
                        sourceId
                        meta
                        uploadAllTracker
                        model.files
                        (SingleUploadResp uploadAllTracker)

                else
                    Cmd.batch (Api.upload flags sourceId meta model.files SingleUploadResp)
@@ -70,6 +70,9 @@
.default-layout .ui.segment .item-notes {
  padding: 0 1em;
}
.default-layout .ui.segment.item-notes-display {
  background: rgba(246, 255, 158, 0.4);
}

.default-layout .extracted-text {
  font-family: monospace;