mirror of https://github.com/TheAnachronism/docspell.git
synced 2025-04-05 19:09:32 +00:00

commit 324476c1a9

Changelog.md | 13
@@ -8,18 +8,21 @@
   periodically to import your mails.
 - New feature "Integration Endpoint". Allows an admin to upload files
   to any collective using a separate endpoint.
+- New feature: add files to existing items.
+- The document list on the front-page has been rewritten. The table is
+  removed and documents are now presented in a “card view”.
+- Amend the mail-to-pdf conversion to include the e-mail date.
+- When processing e-mails, set the item date automatically from the
+  received-date in the mail.
+- Fixes regarding character encodings when reading e-mails.
 - Fix the `find-by-checksum` route that, given a sha256 checksum,
   returns whether there is such a file in docspell. It falsely
   returned `false` although documents existed.
-- Amend the mail-to-pdf conversion to include the e-mail date.
 - Fix webapp for mobile devices.
-- The document list on the front-page has been rewritten. The table is
-  removed and documents are now presented in a “card view”.
 - Fix the search menu to remember dates in fields. When going back
   from an item detail to the front-page, the search menu remembers the
   last state, but dates were cleared.
-- More fixes regarding character encodings when reading e-mails.
-- Fix redirecting `/` to `/app`.
+- Fix redirecting `/` only to `/app`.

 ### Configuration Changes

@@ -19,6 +19,15 @@ private[analysis] object Tld {
     ".edu",
     ".gov",
     ".mil",
+    ".info",
+    ".app",
+    ".bar",
+    ".biz",
+    ".club",
+    ".coop",
+    ".icu",
+    ".name",
+    ".xyz",
     ".ad",
     ".ae",
     ".al",
@@ -1,15 +1,17 @@
 package docspell.backend.ops

 import bitpeace.MimetypeHint
-import cats.implicits._
+import cats.Functor
+import cats.data.{EitherT, OptionT}
 import cats.effect._
+import cats.implicits._
 import docspell.backend.Config
 import fs2.Stream
 import docspell.common._
 import docspell.common.syntax.all._
 import docspell.store.Store
 import docspell.store.queue.JobQueue
-import docspell.store.records.{RCollective, RJob, RSource}
+import docspell.store.records._
 import org.log4s._

 trait OUpload[F[_]] {
@@ -17,14 +19,29 @@ trait OUpload[F[_]] {
   def submit(
       data: OUpload.UploadData[F],
       account: AccountId,
-      notifyJoex: Boolean
+      notifyJoex: Boolean,
+      itemId: Option[Ident]
   ): F[OUpload.UploadResult]

   def submit(
       data: OUpload.UploadData[F],
       sourceId: Ident,
-      notifyJoex: Boolean
+      notifyJoex: Boolean,
+      itemId: Option[Ident]
   ): F[OUpload.UploadResult]
+
+  final def submitEither(
+      data: OUpload.UploadData[F],
+      accOrSrc: Either[Ident, AccountId],
+      notifyJoex: Boolean,
+      itemId: Option[Ident]
+  ): F[OUpload.UploadResult] =
+    accOrSrc match {
+      case Right(acc) =>
+        submit(data, acc, notifyJoex, itemId)
+      case Left(srcId) =>
+        submit(data, srcId, notifyJoex, itemId)
+    }
 }

 object OUpload {
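Note: the new `submit` overloads and `submitEither` thread an optional `itemId` through every upload path; `None` keeps the old behavior (create a new item) while `Some(id)` amends an existing item. Below is a minimal, self-contained sketch of that dispatch shape, assuming cats-effect on the classpath and using illustrative types rather than the docspell ones.

```scala
import cats.effect.{IO, IOApp}

object SubmitEitherSketch extends IOApp.Simple {
  final case class SourceId(value: String)
  final case class Account(collective: String, user: String)

  // itemId = None creates a new item; Some(id) amends an existing one.
  def submitForAccount(acc: Account, itemId: Option[String]): IO[String] =
    IO.pure(s"account upload by ${acc.user}, target=${itemId.getOrElse("<new item>")}")

  def submitForSource(src: SourceId, itemId: Option[String]): IO[String] =
    IO.pure(s"source upload via ${src.value}, target=${itemId.getOrElse("<new item>")}")

  // Mirrors the shape of OUpload.submitEither: one entry point, two targets.
  def submitEither(
      accOrSrc: Either[SourceId, Account],
      itemId: Option[String]
  ): IO[String] =
    accOrSrc match {
      case Right(acc)  => submitForAccount(acc, itemId)
      case Left(srcId) => submitForSource(srcId, itemId)
    }

  def run: IO[Unit] =
    submitEither(Right(Account("family", "user1")), Some("item-42"))
      .flatMap(r => IO.println(r))
}
```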
@@ -52,11 +69,32 @@ object OUpload {

   sealed trait UploadResult
   object UploadResult {
-    case object Success extends UploadResult
-    case object NoFiles extends UploadResult
+
+    /** File(s) have been successfully submitted. */
+    case object Success extends UploadResult
+
+    def success: UploadResult = Success
+
+    /** There were no files to submit. */
+    case object NoFiles extends UploadResult
+
+    def noFiles: UploadResult = NoFiles
+
+    /** A source (`RSource') could not be found for a given source-id. */
     case object NoSource extends UploadResult
+
+    def noSource: UploadResult = NoSource
+
+    /** When adding files to an item, no item was found using the given
+      * item-id. */
+    case object NoItem extends UploadResult
+
+    def noItem: UploadResult = NoItem
   }

+  private def right[F[_]: Functor, A](a: F[A]): EitherT[F, UploadResult, A] =
+    EitherT.right(a)
+
   def apply[F[_]: Sync](
       store: Store[F],
       queue: JobQueue[F],
@@ -68,14 +106,17 @@ object OUpload {
      def submit(
          data: OUpload.UploadData[F],
          account: AccountId,
-          notifyJoex: Boolean
+          notifyJoex: Boolean,
+          itemId: Option[Ident]
      ): F[OUpload.UploadResult] =
-        for {
-          files <- data.files.traverse(saveFile).map(_.flatten)
-          pred <- checkFileList(files)
-          lang <- store.transact(RCollective.findLanguage(account.collective))
+        (for {
+          _ <- checkExistingItem(itemId, account.collective)
+          files <- right(data.files.traverse(saveFile).map(_.flatten))
+          _ <- checkFileList(files)
+          lang <- right(store.transact(RCollective.findLanguage(account.collective)))
          meta = ProcessItemArgs.ProcessMeta(
            account.collective,
+            itemId,
            lang.getOrElse(Language.German),
            data.meta.direction,
            data.meta.sourceAbbrev,
@@ -84,29 +125,31 @@ object OUpload {
          args =
            if (data.multiple) files.map(f => ProcessItemArgs(meta, List(f)))
            else Vector(ProcessItemArgs(meta, files.toList))
-          job <- pred.traverse(_ => makeJobs(args, account, data.priority, data.tracker))
-          _ <- logger.fdebug(s"Storing jobs: $job")
-          res <- job.traverse(submitJobs(notifyJoex))
-          _ <- store.transact(
-            RSource.incrementCounter(data.meta.sourceAbbrev, account.collective)
+          jobs <- right(makeJobs(args, account, data.priority, data.tracker))
+          _ <- right(logger.fdebug(s"Storing jobs: $jobs"))
+          res <- right(submitJobs(notifyJoex)(jobs))
+          _ <- right(
+            store.transact(
+              RSource.incrementCounter(data.meta.sourceAbbrev, account.collective)
+            )
          )
-        } yield res.fold(identity, identity)
+        } yield res).fold(identity, identity)

      def submit(
          data: OUpload.UploadData[F],
          sourceId: Ident,
-          notifyJoex: Boolean
+          notifyJoex: Boolean,
+          itemId: Option[Ident]
      ): F[OUpload.UploadResult] =
-        for {
-          sOpt <-
-            store
-              .transact(RSource.find(sourceId))
-              .map(_.toRight(UploadResult.NoSource))
-          abbrev = sOpt.map(_.abbrev).toOption.getOrElse(data.meta.sourceAbbrev)
-          updata = data.copy(meta = data.meta.copy(sourceAbbrev = abbrev))
-          accId = sOpt.map(source => AccountId(source.cid, source.sid))
-          result <- accId.traverse(acc => submit(updata, acc, notifyJoex))
-        } yield result.fold(identity, identity)
+        (for {
+          src <- OptionT(store.transact(RSource.find(sourceId)))
+          updata = data.copy(
+            meta = data.meta.copy(sourceAbbrev = src.abbrev),
+            priority = src.priority
+          )
+          accId = AccountId(src.cid, src.sid)
+          result <- OptionT.liftF(submit(updata, accId, notifyJoex, itemId))
+        } yield result).getOrElse(UploadResult.noSource)

      private def submitJobs(
          notifyJoex: Boolean
@@ -117,6 +160,7 @@ object OUpload {
          _ <- if (notifyJoex) joex.notifyAllNodes else ().pure[F]
        } yield UploadResult.Success

+      /** Saves the file into the database. */
      private def saveFile(file: File[F]): F[Option[ProcessItemArgs.File]] =
        logger.finfo(s"Receiving file $file") *>
          store.bitpeace
@@ -135,10 +179,24 @@ object OUpload {
            )
          )

+      private def checkExistingItem(
+          itemId: Option[Ident],
+          coll: Ident
+      ): EitherT[F, UploadResult, Unit] =
+        itemId match {
+          case None =>
+            right(().pure[F])
+          case Some(id) =>
+            OptionT(store.transact(RItem.findByIdAndCollective(id, coll)))
+              .toRight(UploadResult.noItem)
+              .map(_ => ())
+        }
+
      private def checkFileList(
          files: Seq[ProcessItemArgs.File]
-      ): F[Either[UploadResult, Unit]] =
-        Sync[F].pure(if (files.isEmpty) Left(UploadResult.NoFiles) else Right(()))
+      ): EitherT[F, UploadResult, Unit] =
+        if (files.isEmpty) EitherT.left(UploadResult.noFiles.pure[F])
+        else right(().pure[F])

      private def makeJobs(
          args: Vector[ProcessItemArgs],
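The rewritten `submit` runs its checks inside `EitherT`, so the first failing validation (`NoItem`, `NoFiles`, `NoSource`) short-circuits the whole for-comprehension, and `.fold(identity, identity)` collapses the result back to a plain `UploadResult`. A minimal sketch of that pattern, with illustrative types and cats/cats-effect assumed on the classpath:

```scala
import cats.data.EitherT
import cats.effect.{IO, IOApp}

object UploadPipelineSketch extends IOApp.Simple {
  sealed trait Result
  case object Success extends Result
  case object NoFiles extends Result
  case object NoItem  extends Result

  // Each check either passes (Right) or aborts the pipeline (Left).
  def checkItem(itemId: Option[String]): EitherT[IO, Result, Unit] =
    EitherT.cond[IO](itemId.forall(_.nonEmpty), (), NoItem: Result)

  def checkFiles(files: List[String]): EitherT[IO, Result, Unit] =
    EitherT.cond[IO](files.nonEmpty, (), NoFiles: Result)

  def submit(itemId: Option[String], files: List[String]): IO[Result] =
    (for {
      _ <- checkItem(itemId)
      _ <- checkFiles(files)
      // the real submit saves files and enqueues processing jobs here
    } yield (Success: Result)).fold(identity, identity)

  def run: IO[Unit] =
    submit(Some("item-42"), List("scan.pdf")).flatMap(r => IO.println(r)) // Success
}
```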
@@ -1,11 +1,18 @@
 package docspell.common

 import io.circe.{Decoder, Encoder}
+import cats.data.NonEmptyList

 sealed trait ItemState { self: Product =>

   final def name: String =
     productPrefix.toLowerCase
+
+  def isValid: Boolean =
+    ItemState.validStates.exists(_ == this)
+
+  def isInvalid: Boolean =
+    ItemState.invalidStates.exists(_ == this)
 }

 object ItemState {
@@ -24,8 +31,11 @@ object ItemState {
      case _ => Left(s"Invalid item state: $str")
    }

-  val validStates: Seq[ItemState] =
-    Seq(Created, Confirmed)
+  val validStates: NonEmptyList[ItemState] =
+    NonEmptyList.of(Created, Confirmed)
+
+  val invalidStates: NonEmptyList[ItemState] =
+    NonEmptyList.of(Premature, Processing)

  def unsafe(str: String): ItemState =
    fromString(str).fold(sys.error, identity)
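Moving `validStates` to `NonEmptyList` (and adding `invalidStates`) is arguably more than cosmetic: a non-empty list can safely feed a SQL `IN (...)` clause (see `state.isIn(existing)` in `RItem.updateState` further down), which would be ill-formed for an empty list. A tiny sketch of the membership check, assuming cats on the classpath and illustrative state names:

```scala
import cats.data.NonEmptyList

object ItemStateSketch extends App {
  sealed trait State
  case object Premature  extends State
  case object Processing extends State
  case object Created    extends State
  case object Confirmed  extends State

  val validStates   = NonEmptyList.of[State](Created, Confirmed)
  val invalidStates = NonEmptyList.of[State](Premature, Processing)

  // mirrors ItemState.isValid: simple membership in the NonEmptyList
  def isValid(s: State): Boolean = validStates.exists(_ == s)

  assert(isValid(Confirmed) && !isValid(Premature))
  println("ok")
}
```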
@@ -4,6 +4,14 @@ import io.circe._, io.circe.generic.semiauto._
 import docspell.common.syntax.all._
 import ProcessItemArgs._

+/** Arguments to the process-item task.
+  *
+  * This task is run for each new file to create a new item from it or
+  * to add this file as an attachment to an existing item.
+  *
+  * If the `itemId' is set to some value, the item is tried to load to
+  * ammend with the given files. Otherwise a new item is created.
+  */
 case class ProcessItemArgs(meta: ProcessMeta, files: List[File]) {

   def makeSubject: String =
@@ -22,6 +30,7 @@ object ProcessItemArgs {

   case class ProcessMeta(
       collective: Ident,
+      itemId: Option[Ident],
       language: Language,
       direction: Option[Direction],
       sourceAbbrev: String,
@@ -77,7 +77,7 @@ object JoexAppImpl {
      .withTask(
        JobTask.json(
          ProcessItemArgs.taskName,
-          ItemHandler[F](cfg),
+          ItemHandler.newItem[F](cfg),
          ItemHandler.onCancel[F]
        )
      )
@@ -71,7 +71,7 @@ object NotifyDueItemsTask {
      QItem.Query
        .empty(ctx.args.account.collective)
        .copy(
-          states = ItemState.validStates,
+          states = ItemState.validStates.toList,
          tagsInclude = ctx.args.tagsInclude,
          tagsExclude = ctx.args.tagsExclude,
          dueDateFrom = ctx.args.daysBack.map(back => now - Duration.days(back.toLong)),
@@ -62,7 +62,7 @@ object ConvertPdf {
    Conversion.create[F](cfg, sanitizeHtml, ctx.blocker, ctx.logger).use { conv =>
      mime match {
        case mt if mt.baseEqual(Mimetype.`application/pdf`) =>
-          ctx.logger.info("Not going to convert a PDF file into a PDF.") *>
+          ctx.logger.debug(s"Not going to convert a PDF file ${ra.name} into a PDF.") *>
            (ra, None: Option[RAttachmentMeta]).pure[F]

        case _ =>
@@ -32,44 +32,75 @@ object CreateItem {

    def fileMetas(itemId: Ident, now: Timestamp) =
      Stream
-        .emits(ctx.args.files)
-        .flatMap(f => ctx.store.bitpeace.get(f.fileMetaId.id).map(fm => (f, fm)))
-        .collect({ case (f, Some(fm)) if isValidFile(fm) => f })
-        .zipWithIndex
-        .evalMap({
-          case (f, index) =>
-            Ident
-              .randomId[F]
-              .map(id =>
-                RAttachment(id, itemId, f.fileMetaId, index.toInt, now, f.name)
-              )
-        })
+        .eval(ctx.store.transact(RAttachment.nextPosition(itemId)))
+        .flatMap { offset =>
+          Stream
+            .emits(ctx.args.files)
+            .flatMap(f => ctx.store.bitpeace.get(f.fileMetaId.id).map(fm => (f, fm)))
+            .collect({ case (f, Some(fm)) if isValidFile(fm) => f })
+            .zipWithIndex
+            .evalMap({
+              case (f, index) =>
+                Ident
+                  .randomId[F]
+                  .map(id =>
+                    RAttachment(
+                      id,
+                      itemId,
+                      f.fileMetaId,
+                      index.toInt + offset,
+                      now,
+                      f.name
+                    )
+                  )
+            })
+        }
        .compile
        .toVector

-    val item = RItem.newItem[F](
-      ctx.args.meta.collective,
-      ctx.args.makeSubject,
-      ctx.args.meta.sourceAbbrev,
-      ctx.args.meta.direction.getOrElse(Direction.Incoming),
-      ItemState.Premature
-    )
+    val loadItemOrInsertNew =
+      ctx.args.meta.itemId match {
+        case Some(id) =>
+          (for {
+            _ <- OptionT.liftF(
+              ctx.logger.info(
+                s"Loading item with id ${id.id} to ammend"
+              )
+            )
+            item <- OptionT(
+              ctx.store
+                .transact(RItem.findByIdAndCollective(id, ctx.args.meta.collective))
+            )
+          } yield (1, item))
+            .getOrElseF(Sync[F].raiseError(new Exception(s"Item not found.")))
+        case None =>
+          for {
+            _ <- ctx.logger.info(
+              s"Creating new item with ${ctx.args.files.size} attachment(s)"
+            )
+            item <- RItem.newItem[F](
+              ctx.args.meta.collective,
+              ctx.args.makeSubject,
+              ctx.args.meta.sourceAbbrev,
+              ctx.args.meta.direction.getOrElse(Direction.Incoming),
+              ItemState.Premature
+            )
+            n <- ctx.store.transact(RItem.insert(item))
+          } yield (n, item)
+      }

    for {
-      _ <- ctx.logger.info(
-        s"Creating new item with ${ctx.args.files.size} attachment(s)"
-      )
      time <- Duration.stopTime[F]
-      it <- item
-      n <- ctx.store.transact(RItem.insert(it))
-      _ <- if (n != 1) storeItemError[F](ctx) else ().pure[F]
-      fm <- fileMetas(it.id, it.created)
+      it <- loadItemOrInsertNew
+      _ <- if (it._1 != 1) storeItemError[F](ctx) else ().pure[F]
+      now <- Timestamp.current[F]
+      fm <- fileMetas(it._2.id, now)
      k <- fm.traverse(insertAttachment(ctx))
      _ <- logDifferences(ctx, fm, k.sum)
      dur <- time
      _ <- ctx.logger.info(s"Creating item finished in ${dur.formatExact}")
    } yield ItemData(
-      it,
+      it._2,
      fm,
      Vector.empty,
      Vector.empty,
@@ -86,10 +117,11 @@ object CreateItem {
      } yield n)
    }

-  def findExisting[F[_]: Sync]: Task[F, ProcessItemArgs, Option[ItemData]] =
+  private def findExisting[F[_]: Sync]: Task[F, ProcessItemArgs, Option[ItemData]] =
    Task { ctx =>
+      val fileMetaIds = ctx.args.files.map(_.fileMetaId).toSet
      for {
-        cand <- ctx.store.transact(QItem.findByFileIds(ctx.args.files.map(_.fileMetaId)))
+        cand <- ctx.store.transact(QItem.findByFileIds(fileMetaIds.toSeq))
        _ <-
          if (cand.nonEmpty) ctx.logger.warn("Found existing item with these files.")
          else ().pure[F]
@@ -99,8 +131,11 @@ object CreateItem {
            ctx.logger.warn(s"Removed ${ht.sum} items with same attachments")
          else ().pure[F]
        rms <- OptionT(
+          //load attachments but only those mentioned in the task's arguments
          cand.headOption.traverse(ri =>
-            ctx.store.transact(RAttachment.findByItemAndCollective(ri.id, ri.cid))
+            ctx.store
+              .transact(RAttachment.findByItemAndCollective(ri.id, ri.cid))
+              .map(_.filter(r => fileMetaIds.contains(r.fileId)))
          )
        ).getOrElse(Vector.empty)
        orig <- rms.traverse(a =>
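A pure sketch (illustrative names, plain Scala) of the offset logic introduced in `fileMetas`: when files are added to an existing item, their positions continue after the item's current maximum (`RAttachment.nextPosition`) instead of restarting at zero, so old and new attachments keep a stable total order.

```scala
object AppendPositionsSketch extends App {
  // offset comes from the database: MAX(position) + 1, or 0 for a fresh item
  def assignPositions(offset: Int, newFiles: Vector[String]): Vector[(String, Int)] =
    newFiles.zipWithIndex.map { case (f, i) => (f, i + offset) }

  // an item that already has attachments at positions 0..2:
  println(assignPositions(3, Vector("a.pdf", "b.pdf")))
  // Vector((a.pdf,3), (b.pdf,4))
}
```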
@@ -13,6 +13,8 @@ import docspell.store.records._
 import docspell.files.Zip
 import cats.kernel.Monoid
 import emil.Mail
+import cats.kernel.Order
+import cats.data.NonEmptyList

 /** Goes through all attachments and extracts archive files, like zip
   * files. The process is recursive, until all archives have been
@@ -46,22 +48,37 @@ object ExtractArchive {
      archive: Option[RAttachmentArchive]
  ): Task[F, ProcessItemArgs, (Option[RAttachmentArchive], ItemData)] =
    Task { ctx =>
-      def extract(ra: RAttachment) =
-        findMime(ctx)(ra).flatMap(m => extractSafe(ctx, archive)(ra, m))
+      def extract(ra: RAttachment, pos: Int): F[Extracted] =
+        findMime(ctx)(ra).flatMap(m => extractSafe(ctx, archive)(ra, pos, m))

      for {
-        ras <- item.attachments.traverse(extract)
-        nra = ras.flatMap(_.files).zipWithIndex.map(t => t._1.copy(position = t._2))
-        _ <- nra.traverse(storeAttachment(ctx))
-        naa = ras.flatMap(_.archives)
+        lastPos <- ctx.store.transact(RAttachment.nextPosition(item.item.id))
+        extracts <-
+          item.attachments.zipWithIndex
+            .traverse(t => extract(t._1, lastPos + t._2))
+            .map(Monoid[Extracted].combineAll)
+            .map(fixPositions)
+        nra = extracts.files
+        _ <- extracts.files.traverse(storeAttachment(ctx))
+        naa = extracts.archives
        _ <- naa.traverse(storeArchive(ctx))
      } yield naa.headOption -> item.copy(
        attachments = nra,
        originFile = item.originFile ++ nra.map(a => a.id -> a.fileId).toMap,
-        givenMeta = item.givenMeta.fillEmptyFrom(Monoid[Extracted].combineAll(ras).meta)
+        givenMeta = item.givenMeta.fillEmptyFrom(extracts.meta)
      )
    }

+  /** After all files have been extracted, the `extract' contains the
+    * whole (combined) result. This fixes positions of the attachments
+    * such that the elements of an archive are "spliced" into the
+    * attachment list at the position of the archive. If there is no
+    * archive, positions don't need to be fixed.
+    */
+  private def fixPositions(extract: Extracted): Extracted =
+    if (extract.archives.isEmpty) extract
+    else extract.updatePositions
+
  def findMime[F[_]: Functor](ctx: Context[F, _])(ra: RAttachment): F[Mimetype] =
    OptionT(ctx.store.transact(RFileMeta.findById(ra.fileId)))
      .map(_.mimetype)
@@ -70,21 +87,21 @@ object ExtractArchive {
  def extractSafe[F[_]: ConcurrentEffect: ContextShift](
      ctx: Context[F, ProcessItemArgs],
      archive: Option[RAttachmentArchive]
-  )(ra: RAttachment, mime: Mimetype): F[Extracted] =
+  )(ra: RAttachment, pos: Int, mime: Mimetype): F[Extracted] =
    mime match {
      case Mimetype("application", "zip", _) if ra.name.exists(_.endsWith(".zip")) =>
        ctx.logger.info(s"Extracting zip archive ${ra.name.getOrElse("<noname>")}.") *>
-          extractZip(ctx, archive)(ra)
+          extractZip(ctx, archive)(ra, pos)
            .flatTap(_ => cleanupParents(ctx, ra, archive))

      case Mimetype("message", "rfc822", _) =>
        ctx.logger.info(s"Reading e-mail ${ra.name.getOrElse("<noname>")}") *>
-          extractMail(ctx, archive)(ra)
+          extractMail(ctx, archive)(ra, pos)
            .flatTap(_ => cleanupParents(ctx, ra, archive))

      case _ =>
        ctx.logger.debug(s"Not an archive: ${mime.asString}") *>
-          Extracted.noArchive(ra).pure[F]
+          Extracted.noArchive(ra, pos, 0).pure[F]
    }

  def cleanupParents[F[_]: Sync](
@@ -114,7 +131,7 @@ object ExtractArchive {
  def extractZip[F[_]: ConcurrentEffect: ContextShift](
      ctx: Context[F, _],
      archive: Option[RAttachmentArchive]
-  )(ra: RAttachment): F[Extracted] = {
+  )(ra: RAttachment, pos: Int): F[Extracted] = {
    val zipData = ctx.store.bitpeace
      .get(ra.fileId.id)
      .unNoneTerminate
@@ -122,7 +139,8 @@ object ExtractArchive {

    zipData
      .through(Zip.unzipP[F](8192, ctx.blocker))
-      .flatMap(handleEntry(ctx, ra, archive, None))
+      .zipWithIndex
+      .flatMap(handleEntry(ctx, ra, pos, archive, None))
      .foldMonoid
      .compile
      .lastOrError
@@ -131,7 +149,7 @@ object ExtractArchive {
  def extractMail[F[_]: ConcurrentEffect: ContextShift](
      ctx: Context[F, _],
      archive: Option[RAttachmentArchive]
-  )(ra: RAttachment): F[Extracted] = {
+  )(ra: RAttachment, pos: Int): F[Extracted] = {
    val email: Stream[F, Byte] = ctx.store.bitpeace
      .get(ra.fileId.id)
      .unNoneTerminate
@@ -149,7 +167,8 @@ object ExtractArchive {

        ReadMail
          .mailToEntries(ctx.logger)(mail)
-          .flatMap(handleEntry(ctx, ra, archive, mId)) ++ Stream.eval(givenMeta)
+          .zipWithIndex
+          .flatMap(handleEntry(ctx, ra, pos, archive, mId)) ++ Stream.eval(givenMeta)
      }
      .foldMonoid
      .compile
@@ -165,13 +184,15 @@ object ExtractArchive {
  def handleEntry[F[_]: Sync](
      ctx: Context[F, _],
      ra: RAttachment,
+      pos: Int,
      archive: Option[RAttachmentArchive],
      messageId: Option[String]
  )(
-      entry: Binary[F]
+      tentry: (Binary[F], Long)
  ): Stream[F, Extracted] = {
-    val mimeHint = MimetypeHint.filename(entry.name).withAdvertised(entry.mime.asString)
-    val fileMeta = ctx.store.bitpeace.saveNew(entry.data, 8192, mimeHint)
+    val (entry, subPos) = tentry
+    val mimeHint = MimetypeHint.filename(entry.name).withAdvertised(entry.mime.asString)
+    val fileMeta = ctx.store.bitpeace.saveNew(entry.data, 8192, mimeHint)
    Stream.eval(ctx.logger.debug(s"Extracted ${entry.name}. Storing as attachment.")) >>
      fileMeta.evalMap { fm =>
        Ident.randomId.map { id =>
@@ -179,12 +200,12 @@ object ExtractArchive {
            id,
            ra.itemId,
            Ident.unsafe(fm.id),
-            0, //position is updated afterwards
+            pos,
            ra.created,
            Option(entry.name).map(_.trim).filter(_.nonEmpty)
          )
          val aa = archive.getOrElse(RAttachmentArchive.of(ra, messageId)).copy(id = id)
-          Extracted.of(nra, aa)
+          Extracted.of(nra, aa, pos, subPos.toInt)
        }
      }

@@ -204,28 +225,67 @@ object ExtractArchive {
  case class Extracted(
      files: Vector[RAttachment],
      archives: Vector[RAttachmentArchive],
-      meta: MetaProposalList
+      meta: MetaProposalList,
+      positions: List[Extracted.Pos]
  ) {
    def ++(e: Extracted) =
-      Extracted(files ++ e.files, archives ++ e.archives, meta.fillEmptyFrom(e.meta))
+      Extracted(
+        files ++ e.files,
+        archives ++ e.archives,
+        meta.fillEmptyFrom(e.meta),
+        positions ++ e.positions
+      )

    def setMeta(m: MetaProposal): Extracted =
      setMeta(MetaProposalList.of(m))

    def setMeta(ml: MetaProposalList): Extracted =
-      Extracted(files, archives, meta.fillEmptyFrom(ml))
+      Extracted(files, archives, meta.fillEmptyFrom(ml), positions)
+
+    def updatePositions: Extracted =
+      NonEmptyList.fromList(positions) match {
+        case None =>
+          this
+        case Some(nel) =>
+          val sorted = nel.sorted
+          println(s"---------------------------- $sorted ")
+          val offset = sorted.head.first
+          val pos =
+            sorted.zipWithIndex.map({ case (p, i) => p.id -> (i + offset) }).toList.toMap
+          val nf =
+            files.map(f => pos.get(f.id).map(n => f.copy(position = n)).getOrElse(f))
+          copy(files = nf)
+      }
  }
  object Extracted {
-    val empty = Extracted(Vector.empty, Vector.empty, MetaProposalList.empty)
+    val empty =
+      Extracted(Vector.empty, Vector.empty, MetaProposalList.empty, Nil)

-    def noArchive(ra: RAttachment): Extracted =
-      Extracted(Vector(ra), Vector.empty, MetaProposalList.empty)
+    def noArchive(ra: RAttachment, pos: Int, subPos: Int): Extracted =
+      Extracted(
+        Vector(ra),
+        Vector.empty,
+        MetaProposalList.empty,
+        List(Pos(ra.id, pos, subPos))
+      )

-    def of(ra: RAttachment, aa: RAttachmentArchive): Extracted =
-      Extracted(Vector(ra), Vector(aa), MetaProposalList.empty)
+    def of(ra: RAttachment, aa: RAttachmentArchive, pos: Int, subPos: Int): Extracted =
+      Extracted(
+        Vector(ra),
+        Vector(aa),
+        MetaProposalList.empty,
+        List(Pos(ra.id, pos, subPos))
+      )

    implicit val extractedMonoid: Monoid[Extracted] =
      Monoid.instance(empty, _ ++ _)
+
+    case class Pos(id: Ident, first: Int, second: Int)
+
+    object Pos {
+      implicit val ordering: Order[Pos] =
+        Order.whenEqual(Order.by(_.first), Order.by(_.second))
+    }
  }

 }
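A sketch of the ordering behind `Extracted.Pos` and `updatePositions` (plain Scala, illustrative data): entries extracted from an archive inherit the archive's `first` position and are distinguished by `second`, so sorting by the pair splices them into the attachment list exactly where the archive sat.

```scala
object SplicePositionsSketch extends App {
  final case class Pos(id: String, first: Int, second: Int)

  val positions = List(
    Pos("letter.pdf", 0, 0),      // plain attachment at position 0
    Pos("zip-entry-a.pdf", 1, 0), // extracted from the archive at position 1
    Pos("zip-entry-b.pdf", 1, 1),
    Pos("invoice.pdf", 2, 0)      // plain attachment after the archive
  )

  // Order.whenEqual(Order.by(_.first), Order.by(_.second)) behaves like this:
  val sorted     = positions.sortBy(p => (p.first, p.second))
  val renumbered = sorted.zipWithIndex.map { case (p, i) => p.id -> i }
  println(renumbered)
  // List((letter.pdf,0), (zip-entry-a.pdf,1), (zip-entry-b.pdf,2), (invoice.pdf,3))
}
```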
@@ -2,19 +2,20 @@ package docspell.joex.process

 import cats.implicits._
 import cats.effect._
+import fs2.Stream
 import docspell.common.{ItemState, ProcessItemArgs}
 import docspell.joex.Config
-import docspell.joex.scheduler.{Context, Task}
+import docspell.joex.scheduler.Task
 import docspell.store.queries.QItem
-import docspell.store.records.{RItem, RJob}
+import docspell.store.records.RItem

 object ItemHandler {
   def onCancel[F[_]: Sync: ContextShift]: Task[F, ProcessItemArgs, Unit] =
     logWarn("Now cancelling. Deleting potentially created data.").flatMap(_ =>
-      deleteByFileIds
+      deleteByFileIds.flatMap(_ => deleteFiles)
     )

-  def apply[F[_]: ConcurrentEffect: ContextShift](
+  def newItem[F[_]: ConcurrentEffect: ContextShift](
      cfg: Config
  ): Task[F, ProcessItemArgs, Unit] =
    CreateItem[F]
@@ -25,18 +26,19 @@ object ItemHandler {
  def itemStateTask[F[_]: Sync, A](
      state: ItemState
  )(data: ItemData): Task[F, A, ItemData] =
-    Task(ctx => ctx.store.transact(RItem.updateState(data.item.id, state)).map(_ => data))
+    Task(ctx =>
+      ctx.store
+        .transact(RItem.updateState(data.item.id, state, ItemState.invalidStates))
+        .map(_ => data)
+    )

-  def isLastRetry[F[_]: Sync, A](ctx: Context[F, A]): F[Boolean] =
-    for {
-      current <- ctx.store.transact(RJob.getRetries(ctx.jobId))
-      last = ctx.config.retries == current.getOrElse(0)
-    } yield last
+  def isLastRetry[F[_]: Sync]: Task[F, ProcessItemArgs, Boolean] =
+    Task(_.isLastRetry)

  def safeProcess[F[_]: ConcurrentEffect: ContextShift](
      cfg: Config
  )(data: ItemData): Task[F, ProcessItemArgs, ItemData] =
-    Task(isLastRetry[F, ProcessItemArgs] _).flatMap {
+    isLastRetry[F].flatMap {
      case true =>
        ProcessItem[F](cfg)(data).attempt.flatMap({
          case Right(d) =>
@@ -60,6 +62,15 @@ object ItemHandler {
    } yield ()
  }

+  private def deleteFiles[F[_]: Sync]: Task[F, ProcessItemArgs, Unit] =
+    Task(ctx =>
+      Stream
+        .emits(ctx.args.files.map(_.fileMetaId.id))
+        .flatMap(id => ctx.store.bitpeace.delete(id).attempt.drain)
+        .compile
+        .drain
+    )
+
  private def logWarn[F[_]](msg: => String): Task[F, ProcessItemArgs, Unit] =
    Task(_.logger.warn(msg))
 }
@@ -9,21 +9,26 @@ import docspell.store.records.RItem
 object LinkProposal {

   def apply[F[_]: Sync](data: ItemData): Task[F, ProcessItemArgs, ItemData] =
-    Task { ctx =>
-      // sort by weight; order of equal weights is not important, just
-      // choose one others are then suggestions
-      // doc-date is only set when given explicitely, not from "guessing"
-      val proposals = MetaProposalList
-        .flatten(data.metas.map(_.proposals))
-        .filter(_.proposalType != MetaProposalType.DocDate)
-        .sortByWeights
+    if (data.item.state.isValid)
+      Task
+        .log[F, ProcessItemArgs](_.debug(s"Not linking proposals on existing item"))
+        .map(_ => data)
+    else
+      Task { ctx =>
+        // sort by weight; order of equal weights is not important, just
+        // choose one others are then suggestions
+        // doc-date is only set when given explicitely, not from "guessing"
+        val proposals = MetaProposalList
+          .flatten(data.metas.map(_.proposals))
+          .filter(_.proposalType != MetaProposalType.DocDate)
+          .sortByWeights

        ctx.logger.info(s"Starting linking proposals") *>
          MetaProposalType.all
            .traverse(applyValue(data, proposals, ctx))
            .map(result => ctx.logger.info(s"Results from proposal processing: $result"))
            .map(_ => data)
      }

  def applyValue[F[_]: Sync](
      data: ItemData,
@@ -40,8 +45,9 @@ object LinkProposal {
            Result.single(mpt)
          )
      case Some(a) =>
+        val ids = a.values.map(_.ref.id.id)
        ctx.logger.info(
-          s"Found many (${a.size}, ${a.values.map(_.ref.id.id)}) candidates for ${a.proposalType}. Setting first."
+          s"Found many (${a.size}, ${ids}) candidates for ${a.proposalType}. Setting first."
        ) *>
          setItemMeta(data.item.id, ctx, a.proposalType, a.values.head.ref.id).map(_ =>
            Result.multiple(mpt)
@@ -12,11 +12,13 @@ object ProcessItem {
      cfg: Config
  )(item: ItemData): Task[F, ProcessItemArgs, ItemData] =
    ExtractArchive(item)
+      .flatMap(Task.setProgress(20))
      .flatMap(ConvertPdf(cfg.convert, _))
+      .flatMap(Task.setProgress(40))
      .flatMap(TextExtraction(cfg.extraction, _))
-      .flatMap(Task.setProgress(50))
+      .flatMap(Task.setProgress(60))
      .flatMap(analysisOnly[F](cfg.textAnalysis))
-      .flatMap(Task.setProgress(75))
+      .flatMap(Task.setProgress(80))
      .flatMap(LinkProposal[F])
      .flatMap(Task.setProgress(99))

@@ -60,7 +60,7 @@ object TextExtraction {
          rm => rm.setContentIfEmpty(txt.map(_.trim).filter(_.nonEmpty))
        )
        est <- dst
-        _ <- ctx.logger.debug(
+        _ <- ctx.logger.info(
          s"Extracting text for attachment ${stripAttachmentName(ra)} finished in ${est.formatExact}"
        )
      } yield meta
@@ -259,7 +259,7 @@ object ScanMailboxTask {
            priority = Priority.Low,
            tracker = None
          )
-          res <- upload.submit(data, ctx.args.account, false)
+          res <- upload.submit(data, ctx.args.account, false, None)
        } yield res
      }

@@ -1,7 +1,7 @@
 package docspell.joex.scheduler

-import cats.Functor
-import cats.effect.{Blocker, Concurrent}
+import cats.{Applicative, Functor}
+import cats.effect._
 import cats.implicits._
 import docspell.common._
 import docspell.store.Store
@@ -23,6 +23,12 @@ trait Context[F[_], A] { self =>

   def store: Store[F]

+  final def isLastRetry(implicit ev: Applicative[F]): F[Boolean] =
+    for {
+      current <- store.transact(RJob.getRetries(jobId))
+      last = config.retries == current.getOrElse(0)
+    } yield last
+
   def blocker: Blocker

   def map[C](f: A => C)(implicit F: Functor[F]): Context[F, C] =
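With `isLastRetry` moved onto `Context`, any task can branch on whether the scheduler will attempt it again (it is true when the configured retry count equals the retries already recorded for the job). A hedged sketch of the idea, with illustrative types rather than the docspell scheduler API:

```scala
import cats.effect.IO

object LastRetrySketch {
  final case class Ctx(retriesConfigured: Int, retriesUsed: Int) {
    // mirrors Context.isLastRetry: true when no further attempts remain
    def isLastRetry: IO[Boolean] = IO.pure(retriesConfigured == retriesUsed)
  }

  def safeProcess(ctx: Ctx)(work: IO[Unit]): IO[Unit] =
    ctx.isLastRetry.flatMap {
      case true =>
        // last attempt: persist the failure instead of rethrowing
        work.attempt.flatMap {
          case Right(_) => IO.unit
          case Left(e)  => IO.println(s"storing error state: ${e.getMessage}")
        }
      case false =>
        work // let the scheduler retry on failure
    }
}
```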
@@ -87,12 +87,6 @@ paths:
        The upload meta data can be used to tell, whether multiple
        files are one item, or if each file should become a single
        item. By default, each file will be a one item.
-
-        Only certain file types are supported:
-
-        * application/pdf
-
-        Support for more types might be added.
      parameters:
        - $ref: "#/components/parameters/id"
      requestBody:
@@ -115,6 +109,48 @@ paths:
          application/json:
            schema:
              $ref: "#/components/schemas/BasicResult"
+  /open/upload/item/{itemId}/{id}:
+    post:
+      tags: [ Upload ]
+      summary: Upload files to docspell.
+      description: |
+        Upload a file to docspell for processing. The id is a *source
+        id* configured by a collective. Files are submitted for
+        processing which eventually resuts in an item in the inbox of
+        the corresponding collective. This endpoint associates the
+        files to an existing item identified by its `itemId`.
+
+        The request must be a `multipart/form-data` request, where the
+        first part has name `meta`, is optional and may contain upload
+        metadata as JSON. Checkout the structure `ItemUploadMeta` at
+        the end if it is not shown here. Other parts specify the
+        files. Multiple files can be specified, but at least on is
+        required.
+
+        Upload meta data is ignored.
+      parameters:
+        - $ref: "#/components/parameters/id"
+        - $ref: "#/components/parameters/itemId"
+      requestBody:
+        content:
+          multipart/form-data:
+            schema:
+              type: object
+              properties:
+                meta:
+                  $ref: "#/components/schemas/ItemUploadMeta"
+                file:
+                  type: array
+                  items:
+                    type: string
+                    format: binary
+      responses:
+        200:
+          description: Ok
+          content:
+            application/json:
+              schema:
+                $ref: "#/components/schemas/BasicResult"
  /sec/checkfile/{checksum}:
    get:
      tags: [ Upload ]
@@ -155,12 +191,6 @@ paths:
        The upload meta data can be used to tell, whether multiple
        files are one item, or if each file should become a single
        item. By default, each file will be a one item.
-
-        Only certain file types are supported:
-
-        * application/pdf
-
-        Support for more types might be added.
      security:
        - authTokenHeader: []
      requestBody:
@@ -183,6 +213,50 @@ paths:
          application/json:
            schema:
              $ref: "#/components/schemas/BasicResult"
+  /sec/upload/{itemId}:
+    post:
+      tags: [ Upload ]
+      summary: Upload files to docspell.
+      description: |
+        Upload files to docspell for processing. This route is meant
+        for authenticated users that upload files to their account.
+        This endpoint will associate the files to an existing item
+        identified by its `itemId`.
+
+        Everything else is the same as with the
+        `/open/upload/item/{itemId}/{id}` endpoint.
+
+        The request must be a "multipart/form-data" request, where the
+        first part is optional and may contain upload metadata as
+        JSON. Other parts specify the files. Multiple files can be
+        specified, but at least on is required.
+
+        The upload meta data is ignored, since the item already
+        exists.
+      security:
+        - authTokenHeader: []
+      parameters:
+        - $ref: "#/components/parameters/itemId"
+      requestBody:
+        content:
+          multipart/form-data:
+            schema:
+              type: object
+              properties:
+                meta:
+                  $ref: "#/components/schemas/ItemUploadMeta"
+                file:
+                  type: array
+                  items:
+                    type: string
+                    format: binary
+      responses:
+        200:
+          description: Ok
+          content:
+            application/json:
+              schema:
+                $ref: "#/components/schemas/BasicResult"
  /open/signup/register:
    post:
      tags: [ Registration ]
@@ -3156,6 +3230,13 @@ components:
      required: true
      schema:
        type: string
+    itemId:
+      name: itemId
+      in: path
+      description: An identifier for an item
+      required: true
+      schema:
+        type: string
    full:
      name: full
      in: query
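A hedged client-side sketch of the new amend upload: POST a multipart body to `/api/v1/sec/upload/item/{itemId}`, the path the Elm client below uses. It assumes the sttp client library on the classpath, docspell's `X-Docspell-Auth` header, and an auth token obtained elsewhere; adjust host, port, and token handling for your setup.

```scala
import sttp.client3._

object AmendUploadClient extends App {
  val backend = HttpURLConnectionBackend()
  val baseUrl = "http://localhost:7880"               // assumption: default dev server
  val token   = sys.env.getOrElse("DS_TOKEN", "")     // assumption: token from a prior login
  val itemId  = "itemid123"                           // illustrative item id

  val response = basicRequest
    .post(uri"$baseUrl/api/v1/sec/upload/item/$itemId")
    .header("X-Docspell-Auth", token)
    .multipartBody(multipartFile("file[]", new java.io.File("scan.pdf")))
    .send(backend)

  println(response.code)
  println(response.body) // BasicResult JSON, e.g. "Files submitted."
}
```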
@@ -109,7 +109,7 @@ trait Conversions {
      coll,
      m.name,
      if (m.inbox) Seq(ItemState.Created)
-      else ItemState.validStates,
+      else ItemState.validStates.toList,
      m.direction,
      m.corrPerson,
      m.corrOrg,
@@ -470,6 +470,7 @@ trait Conversions {
      case UploadResult.Success => BasicResult(true, "Files submitted.")
      case UploadResult.NoFiles => BasicResult(false, "There were no files to submit.")
      case UploadResult.NoSource => BasicResult(false, "The source id is not valid.")
+      case UploadResult.NoItem => BasicResult(false, "The item could not be found.")
    }

  def basicResult(cr: PassChangeResult): BasicResult =
@@ -80,7 +80,7 @@ object IntegrationEndpointRoutes {
              cfg.backend.files.validMimeTypes
            )
        account = AccountId(coll, Ident.unsafe("docspell-system"))
-        result <- backend.upload.submit(updata, account, true)
+        result <- backend.upload.submit(updata, account, true, None)
        res <- Ok(basicResult(result))
      } yield res
    }
@@ -2,13 +2,13 @@ package docspell.restserver.routes

 import cats.effect._
 import cats.implicits._
+import docspell.common._
 import docspell.backend.BackendApp
 import docspell.backend.auth.AuthToken
-import docspell.common.{Ident, Priority}
 import docspell.restserver.Config
 import docspell.restserver.conv.Conversions._
 import docspell.restserver.http4s.ResponseGenerator
-import org.http4s.HttpRoutes
+import org.http4s._
 import org.http4s.circe.CirceEntityEncoder._
 import org.http4s.EntityDecoder._
 import org.http4s.dsl.Http4sDsl
@@ -26,19 +26,14 @@ object UploadRoutes {
    val dsl = new Http4sDsl[F] with ResponseGenerator[F] {}
    import dsl._

+    val submitting = submitFiles[F](backend, cfg, Right(user.account)) _
+
    HttpRoutes.of {
      case req @ POST -> Root / "item" =>
-        for {
-          multipart <- req.as[Multipart[F]]
-          updata <- readMultipart(
-            multipart,
-            logger,
-            Priority.High,
-            cfg.backend.files.validMimeTypes
-          )
-          result <- backend.upload.submit(updata, user.account, true)
-          res <- Ok(basicResult(result))
-        } yield res
+        submitting(req, None, Priority.High, dsl)
+
+      case req @ POST -> Root / "item" / Ident(itemId) =>
+        submitting(req, Some(itemId), Priority.High, dsl)
    }
  }
@@ -48,17 +43,35 @@ object UploadRoutes {

    HttpRoutes.of {
      case req @ POST -> Root / "item" / Ident(id) =>
-        for {
-          multipart <- req.as[Multipart[F]]
-          updata <- readMultipart(
-            multipart,
-            logger,
-            Priority.Low,
-            cfg.backend.files.validMimeTypes
-          )
-          result <- backend.upload.submit(updata, id, true)
-          res <- Ok(basicResult(result))
-        } yield res
+        submitFiles(backend, cfg, Left(id))(req, None, Priority.Low, dsl)
+
+      case req @ POST -> Root / "item" / Ident(itemId) / Ident(id) =>
+        submitFiles(backend, cfg, Left(id))(req, Some(itemId), Priority.Low, dsl)
    }
  }
+
+  private def submitFiles[F[_]: Effect](
+      backend: BackendApp[F],
+      cfg: Config,
+      accOrSrc: Either[Ident, AccountId]
+  )(
+      req: Request[F],
+      itemId: Option[Ident],
+      prio: Priority,
+      dsl: Http4sDsl[F]
+  ): F[Response[F]] = {
+    import dsl._
+
+    for {
+      multipart <- req.as[Multipart[F]]
+      updata <- readMultipart(
+        multipart,
+        logger,
+        prio,
+        cfg.backend.files.validMimeTypes
+      )
+      result <- backend.upload.submitEither(updata, accOrSrc, true, itemId)
+      res <- Ok(basicResult(result))
+    } yield res
+  }
 }
@@ -101,4 +101,6 @@ case class Column(name: String, ns: String = "", alias: String = "") {
  def asc: Fragment =
    f ++ fr"asc"

+  def max: Fragment =
+    fr"MAX(" ++ f ++ fr")"
 }
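The new `Column.max` wraps a column fragment in the SQL `MAX(...)` aggregate. A short sketch of how such fragments compose, assuming doobie on the classpath (table and column names are illustrative):

```scala
import doobie._
import doobie.implicits._

object ColumnMaxSketch {
  val position: Fragment = fr0"position"

  // same shape as Column.max above
  def max(f: Fragment): Fragment = fr"MAX(" ++ f ++ fr")"

  // composes into: SELECT MAX(position) FROM attachment WHERE itemid = ...
  val query: Fragment =
    fr"SELECT" ++ max(position) ++ fr"FROM attachment WHERE itemid = 'abc'"
}
```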
@@ -287,7 +287,7 @@ object QItem {
      n <- store.transact(RItem.deleteByIdAndCollective(itemId, collective))
    } yield tn + rn + n

-  def findByFileIds(fileMetaIds: List[Ident]): ConnectionIO[Vector[RItem]] = {
+  def findByFileIds(fileMetaIds: Seq[Ident]): ConnectionIO[Vector[RItem]] = {
    val IC = RItem.Columns
    val AC = RAttachment.Columns
    val q =
@@ -38,6 +38,11 @@ object RAttachment {
      fr"${v.id},${v.itemId},${v.fileId.id},${v.position},${v.created},${v.name}"
  ).update.run

+  def nextPosition(id: Ident): ConnectionIO[Int] =
+    for {
+      max <- selectSimple(position.max, table, itemId.is(id)).query[Option[Int]].unique
+    } yield max.map(_ + 1).getOrElse(0)
+
  def updateFileIdAndName(
      attachId: Ident,
      fId: Ident,
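Note that `MAX(position)` over an item with no attachments yields a single row whose value is SQL `NULL`, which doobie reads as `None`; the `getOrElse(0)` is what makes the very first attachment land at position 0. The mapping in plain Scala:

```scala
object NextPositionNullSketch extends App {
  // mirrors the yield in RAttachment.nextPosition
  def next(maxPos: Option[Int]): Int = maxPos.map(_ + 1).getOrElse(0)

  assert(next(None) == 0)    // empty item: first attachment gets position 0
  assert(next(Some(4)) == 5) // otherwise continue after the current maximum
  println("ok")
}
```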
@@ -1,7 +1,8 @@
 package docspell.store.records

-import cats.implicits._
+import cats.data.NonEmptyList
 import cats.effect.Sync
+import cats.implicits._
 import doobie._
 import doobie.implicits._
 import docspell.common._
@@ -110,12 +111,16 @@ object RItem {
  def getCollective(itemId: Ident): ConnectionIO[Option[Ident]] =
    selectSimple(List(cid), table, id.is(itemId)).query[Ident].option

-  def updateState(itemId: Ident, itemState: ItemState): ConnectionIO[Int] =
+  def updateState(
+      itemId: Ident,
+      itemState: ItemState,
+      existing: NonEmptyList[ItemState]
+  ): ConnectionIO[Int] =
    for {
      t <- currentTime
      n <- updateRow(
        table,
-        id.is(itemId),
+        and(id.is(itemId), state.isIn(existing)),
        commas(state.setTo(itemState), updated.setTo(t))
      ).update.run
    } yield n
@@ -285,4 +290,7 @@ object RItem {

  def existsById(itemId: Ident): ConnectionIO[Boolean] =
    selectCount(id, table, id.is(itemId)).query[Int].unique.map(_ > 0)
+
+  def findByIdAndCollective(itemId: Ident, coll: Ident): ConnectionIO[Option[RItem]] =
+    selectSimple(all, table, and(id.is(itemId), cid.is(coll))).query[RItem].option
 }
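The extra `existing` parameter turns `updateState` into a guarded transition: the row is only updated when its current state is one of the given states, so reprocessing cannot flip an already-confirmed item back through the pipeline. A plain-Scala model of the guard (illustrative, not the doobie code):

```scala
object GuardedUpdateSketch extends App {
  sealed trait State
  case object Premature  extends State
  case object Processing extends State
  case object Created    extends State
  case object Confirmed  extends State

  // returns (new state, rows changed), like updateState's ConnectionIO[Int]
  def updateState(current: State, target: State, existing: Set[State]): (State, Int) =
    if (existing.contains(current)) (target, 1) else (current, 0)

  val invalid: Set[State] = Set(Premature, Processing)
  assert(updateState(Premature, Created, invalid) == (Created, 1))  // pipeline moves on
  assert(updateState(Confirmed, Created, invalid) == (Confirmed, 0)) // guard holds
  println("ok")
}
```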
@ -71,6 +71,7 @@ module Api exposing
|
|||||||
, submitNotifyDueItems
|
, submitNotifyDueItems
|
||||||
, updateScanMailbox
|
, updateScanMailbox
|
||||||
, upload
|
, upload
|
||||||
|
, uploadAmend
|
||||||
, uploadSingle
|
, uploadSingle
|
||||||
, versionInfo
|
, versionInfo
|
||||||
)
|
)
|
||||||
@ -429,7 +430,42 @@ createImapSettings flags mname ems receive =
 --- Upload
 
 
-upload : Flags -> Maybe String -> ItemUploadMeta -> List File -> (String -> Result Http.Error BasicResult -> msg) -> List (Cmd msg)
+uploadAmend :
+    Flags
+    -> String
+    -> List File
+    -> (String -> Result Http.Error BasicResult -> msg)
+    -> List (Cmd msg)
+uploadAmend flags itemId files receive =
+    let
+        mkReq file =
+            let
+                fid =
+                    Util.File.makeFileId file
+
+                path =
+                    "/api/v1/sec/upload/item/" ++ itemId
+            in
+            Http2.authPostTrack
+                { url = flags.config.baseUrl ++ path
+                , account = getAccount flags
+                , body =
+                    Http.multipartBody <|
+                        [ Http.filePart "file[]" file ]
+                , expect = Http.expectJson (receive fid) Api.Model.BasicResult.decoder
+                , tracker = fid
+                }
+    in
+    List.map mkReq files
+
+
+upload :
+    Flags
+    -> Maybe String
+    -> ItemUploadMeta
+    -> List File
+    -> (String -> Result Http.Error BasicResult -> msg)
+    -> List (Cmd msg)
 upload flags sourceId meta files receive =
     let
         metaStr =
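For context, a minimal sketch of how a caller might drive the new `uploadAmend` — the module name, the `AmendResp` message, and the `startAmendUpload` helper are illustrative assumptions, not part of this commit; only the `Api.uploadAmend` signature above is:

    module AmendExample exposing (Msg(..), startAmendUpload)

    import Api
    import Api.Model.BasicResult exposing (BasicResult)
    import Data.Flags exposing (Flags)
    import File exposing (File)
    import Http


    -- uploadAmend tags each response with the per-file tracker id,
    -- so the message carries the file id alongside the result
    type Msg
        = AmendResp String (Result Http.Error BasicResult)


    startAmendUpload : Flags -> String -> List File -> Cmd Msg
    startAmendUpload flags itemId files =
        -- one tracked request per selected file, batched into a single Cmd
        Cmd.batch (Api.uploadAmend flags itemId files AmendResp)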
@ -457,7 +493,14 @@ upload flags sourceId meta files receive =
     List.map mkReq files
 
 
-uploadSingle : Flags -> Maybe String -> ItemUploadMeta -> String -> List File -> (Result Http.Error BasicResult -> msg) -> Cmd msg
+uploadSingle :
+    Flags
+    -> Maybe String
+    -> ItemUploadMeta
+    -> String
+    -> List File
+    -> (Result Http.Error BasicResult -> msg)
+    -> Cmd msg
 uploadSingle flags sourceId meta track files receive =
     let
         metaStr =
@ -74,7 +74,7 @@ updateWithSub msg model =
             updateNewInvite m model |> noSub
 
         ItemDetailMsg m ->
-            updateItemDetail m model |> noSub
+            updateItemDetail m model
 
         VersionResp (Ok info) ->
             ( { model | version = info }, Cmd.none ) |> noSub
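Dropping `|> noSub` in the `ItemDetailMsg` branch is deliberate: as the next hunk shows, `updateItemDetail` now returns the full `( Model, Cmd Msg, Sub Msg )` triple itself, so its result no longer needs to be lifted.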
@ -172,17 +172,20 @@ updateWithSub msg model =
             ( { model | navMenuOpen = not model.navMenuOpen }, Cmd.none, Sub.none )
 
 
-updateItemDetail : Page.ItemDetail.Data.Msg -> Model -> ( Model, Cmd Msg )
+updateItemDetail : Page.ItemDetail.Data.Msg -> Model -> ( Model, Cmd Msg, Sub Msg )
 updateItemDetail lmsg model =
     let
         inav =
             Page.Home.Data.itemNav model.itemDetailModel.detail.item.id model.homeModel
 
-        ( lm, lc ) =
+        ( lm, lc, ls ) =
             Page.ItemDetail.Update.update model.key model.flags inav.next lmsg model.itemDetailModel
     in
-    ( { model | itemDetailModel = lm }
+    ( { model
+        | itemDetailModel = lm
+      }
     , Cmd.map ItemDetailMsg lc
+    , Sub.map ItemDetailMsg ls
     )
 
 
@ -341,7 +344,19 @@ initPage model page =
             updateQueue Page.Queue.Data.StopRefresh model
 
         ItemDetailPage id ->
-            updateItemDetail (Page.ItemDetail.Data.Init id) model
+            let
+                updateDetail m__ =
+                    let
+                        ( m, c, s ) =
+                            updateItemDetail (Page.ItemDetail.Data.Init id) m__
+                    in
+                    ( { m | subs = Sub.batch [ m.subs, s ] }, c )
+            in
+            Util.Update.andThen1
+                [ updateDetail
+                , updateQueue Page.Queue.Data.StopRefresh
+                ]
+                model
 
 
 noSub : ( Model, Cmd Msg ) -> ( Model, Cmd Msg, Sub Msg )
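The `noSub` helper whose signature closes this hunk is untouched by the commit; judging from the type alone it is presumably just the lift used by branches that produce no subscription — a sketch, not necessarily the repository's exact code:

    -- assumed implementation, inferred from the signature:
    -- lift a subscription-less update result into the triple
    noSub : ( Model, Cmd Msg ) -> ( Model, Cmd Msg, Sub Msg )
    noSub ( m, c ) =
        ( m, c, Sub.none )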
@ -136,6 +136,12 @@ view model =
             [ i [ class "folder open icon" ] []
             , text "Select ..."
             ]
+        , div [ class "ui center aligned text container" ]
+            [ span [ class "small-info" ]
+                [ text "Choose document files (pdf, docx, txt, html, …). "
+                , text "Archives (zip and eml) are extracted."
+                ]
+            ]
         ]
 
 
(File diff suppressed because it is too large)
@ -1,5 +1,7 @@
 module Data.Icons exposing
-    ( concerned
+    ( addFiles
+    , addFilesIcon
+    , concerned
     , concernedIcon
     , correspondent
     , correspondentIcon
@ -51,3 +53,13 @@ editNotes =
 editNotesIcon : Html msg
 editNotesIcon =
     i [ class editNotes ] []
+
+
+addFiles : String
+addFiles =
+    "file plus icon"
+
+
+addFilesIcon : Html msg
+addFilesIcon =
+    i [ class addFiles ] []
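The new icon helpers follow the existing `editNotes`/`editNotesIcon` pattern, so views can embed either the class string or the ready-made node; a small illustrative usage (the button markup itself is an assumption, not from this commit):

    module IconExample exposing (addFilesButton)

    import Data.Icons
    import Html exposing (Html, a, text)
    import Html.Attributes exposing (class, href)


    addFilesButton : Html msg
    addFilesButton =
        -- Semantic UI style button reusing the new "file plus icon" helper
        a [ class "ui basic button", href "#" ]
            [ Data.Icons.addFilesIcon
            , text "Add more files"
            ]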
@ -58,7 +58,9 @@ init flags url key =
             Nothing ->
                 Cmd.none
     in
-    ( m, Cmd.batch [ cmd, Api.versionInfo flags VersionResp, sessionCheck ] )
+    ( m
+    , Cmd.batch [ cmd, Api.versionInfo flags VersionResp, sessionCheck ]
+    )
 
 
 viewDoc : Model -> Document Msg
@ -7,25 +7,27 @@ import Data.Flags exposing (Flags)
 import Page.ItemDetail.Data exposing (Model, Msg(..))
 
 
-update : Nav.Key -> Flags -> Maybe String -> Msg -> Model -> ( Model, Cmd Msg )
+update : Nav.Key -> Flags -> Maybe String -> Msg -> Model -> ( Model, Cmd Msg, Sub Msg )
 update key flags next msg model =
     case msg of
         Init id ->
             let
-                ( lm, lc ) =
+                ( lm, lc, ls ) =
                     Comp.ItemDetail.update key flags next Comp.ItemDetail.Init model.detail
             in
             ( { model | detail = lm }
             , Cmd.batch [ Api.itemDetail flags id ItemResp, Cmd.map ItemDetailMsg lc ]
+            , Sub.map ItemDetailMsg ls
             )
 
         ItemDetailMsg lmsg ->
             let
-                ( lm, lc ) =
+                ( lm, lc, ls ) =
                     Comp.ItemDetail.update key flags next lmsg model.detail
             in
             ( { model | detail = lm }
             , Cmd.map ItemDetailMsg lc
+            , Sub.map ItemDetailMsg ls
             )
 
         ItemResp (Ok item) ->
@ -36,4 +38,4 @@ update key flags next msg model =
|
|||||||
update key flags next (ItemDetailMsg lmsg) model
|
update key flags next (ItemDetailMsg lmsg) model
|
||||||
|
|
||||||
ItemResp (Err _) ->
|
ItemResp (Err _) ->
|
||||||
( model, Cmd.none )
|
( model, Cmd.none, Sub.none )
|
||||||
|
@ -41,7 +41,12 @@ update sourceId flags msg model =
 
             uploads =
                 if model.singleItem then
-                    Api.uploadSingle flags sourceId meta uploadAllTracker model.files (SingleUploadResp uploadAllTracker)
+                    Api.uploadSingle flags
+                        sourceId
+                        meta
+                        uploadAllTracker
+                        model.files
+                        (SingleUploadResp uploadAllTracker)
 
                 else
                     Cmd.batch (Api.upload flags sourceId meta model.files SingleUploadResp)
@ -70,6 +70,9 @@
 .default-layout .ui.segment .item-notes {
     padding: 0 1em;
 }
+.default-layout .ui.segment.item-notes-display {
+    background: rgba(246, 255, 158, 0.4);
+}
 
 .default-layout .extracted-text {
     font-family: monospace;