Mirror of https://github.com/TheAnachronism/docspell.git (synced 2025-04-05 10:59:33 +00:00)

Merge pull request #1501 from eikek/download: Download multiple files as zip

Commit: 6531cabe7a

build.sbt | 14
@ -275,12 +275,24 @@ val openapiScalaSettings = Seq(
            field.copy(typeDef =
              TypeDef("EventType", Imports("docspell.notification.api.EventType"))
            )

        case "jsonminiq" =>
          field =>
            field.copy(typeDef =
              TypeDef("JsonMiniQuery", Imports("docspell.jsonminiq.JsonMiniQuery"))
            )
        case "downloadalltype" =>
          field =>
            field.copy(typeDef =
              TypeDef("DownloadAllType", Imports("docspell.common.DownloadAllType"))
            )
        case "bytesize" =>
          field =>
            field.copy(typeDef = TypeDef("ByteSize", Imports("docspell.common.ByteSize")))
        case "downloadstate" =>
          field =>
            field.copy(typeDef =
              TypeDef("DownloadState", Imports("docspell.common.DownloadState"))
            )
      })
  )
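Note: this openapi-codegen mapping means a spec property declared with a custom format (such as downloadalltype or downloadstate) is generated as the corresponding Scala type instead of a plain String. A rough, illustrative sketch of the resulting model shape (not the exact generated source):

// Illustrative only: roughly what the generator emits for the new
// DownloadAllRequest schema once the custom formats are mapped.
import docspell.common.DownloadAllType
import docspell.query.ItemQuery

final case class DownloadAllRequest(
  query: ItemQuery,          // spec format: itemquery
  fileType: DownloadAllType  // spec format: downloadalltype
)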
@ -51,6 +51,7 @@ trait BackendApp[F[_]] {
  def bookmarks: OQueryBookmarks[F]
  def fileRepository: OFileRepository[F]
  def itemLink: OItemLink[F]
  def downloadAll: ODownloadAll[F]
}

object BackendApp {
@ -107,6 +108,7 @@ object BackendApp {
      bookmarksImpl <- OQueryBookmarks(store)
      fileRepoImpl <- OFileRepository(store, schedulerModule.jobs)
      itemLinkImpl <- Resource.pure(OItemLink(store, itemSearchImpl))
      downloadAllImpl <- Resource.pure(ODownloadAll(store, jobImpl, schedulerModule.jobs))
    } yield new BackendApp[F] {
      val pubSub = pubSubT
      val login = loginImpl
@ -136,5 +138,6 @@ object BackendApp {
      val bookmarks = bookmarksImpl
      val fileRepository = fileRepoImpl
      val itemLink = itemLinkImpl
      val downloadAll = downloadAllImpl
    }
}
@ -10,11 +10,27 @@ import cats.effect._
import cats.implicits._

import docspell.backend.MailAddressCodec
import docspell.backend.task.DownloadZipArgs
import docspell.common._
import docspell.notification.api.PeriodicQueryArgs
import docspell.scheduler.Job

object JobFactory extends MailAddressCodec {
  def downloadZip[F[_]: Sync](
      args: DownloadZipArgs,
      summaryId: Ident,
      submitter: AccountId
  ): F[Job[DownloadZipArgs]] =
    Job.createNew(
      DownloadZipArgs.taskName,
      submitter.collective,
      args,
      s"Prepare zip file for query",
      submitter.user,
      Priority.High,
      Some(summaryId)
    )

  def integrityCheck[F[_]: Sync](
      args: FileIntegrityCheckArgs,
      submitter: AccountId = DocspellSystem.account
@ -25,7 +41,7 @@ object JobFactory extends MailAddressCodec {
      args,
      s"Check integrity of files",
      submitter.user,
      Priority.High,
      Priority.Low,
      Some(FileIntegrityCheckArgs.taskName)
    )
@ -0,0 +1,25 @@ (new file in package docspell.backend.codec)
/*
 * Copyright 2020 Eike K. & Contributors
 *
 * SPDX-License-Identifier: AGPL-3.0-or-later
 */

package docspell.backend.codec

import docspell.query.{ItemQuery, ItemQueryParser}

import io.circe.{Decoder, Encoder}

// NOTE: this is a copy from ItemQueryJson in restapi! TODO cleanup
trait ItemQueryCodec {

  implicit val itemQueryDecoder: Decoder[ItemQuery] =
    Decoder.decodeString.emap(str => ItemQueryParser.parse(str).left.map(_.render))

  implicit val itemQueryEncoder: Encoder[ItemQuery] =
    Encoder.encodeString.contramap(q =>
      q.raw.getOrElse(ItemQueryParser.unsafeAsString(q.expr))
    )
}

object ItemQueryCodec extends ItemQueryCodec
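A small sketch of how this codec behaves, assuming circe is on the classpath and using a made-up query string:

import docspell.backend.codec.ItemQueryCodec._
import docspell.query.ItemQuery
import io.circe.Json
import io.circe.syntax._

// decoding: a JSON string is parsed into an ItemQuery; parse errors become decoding failures
val decoded = Json.fromString("tag:invoice").as[ItemQuery]

// encoding: the raw query string is written back out when it is available
val encoded = decoded.map(_.asJson) // Right(Json.fromString("tag:invoice"))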
@ -0,0 +1,245 @@ (new file in package docspell.backend.ops)
/*
 * Copyright 2020 Eike K. & Contributors
 *
 * SPDX-License-Identifier: AGPL-3.0-or-later
 */

package docspell.backend.ops

import java.security.MessageDigest

import cats.data.OptionT
import cats.effect._
import cats.syntax.all._
import fs2.{Pipe, Stream}

import docspell.backend.JobFactory
import docspell.backend.ops.ODownloadAll.model._
import docspell.backend.ops.OJob.JobCancelResult
import docspell.backend.task.DownloadZipArgs
import docspell.common._
import docspell.query.ItemQuery.Expr.ValidItemStates
import docspell.query.{ItemQuery, ItemQueryParser}
import docspell.scheduler.JobStore
import docspell.store.Store
import docspell.store.file.FileMetadata
import docspell.store.queries.{QItem, Query}
import docspell.store.records.{RDownloadQuery, RFileMeta, RJob}

import io.circe.generic.semiauto.{deriveDecoder, deriveEncoder}
import io.circe.{Decoder, Encoder}
import scodec.bits.ByteVector

trait ODownloadAll[F[_]] {

  /** Calculates what kind of zip file would be created and checks the server thresholds.
    */
  def getSummary(account: AccountId, req: DownloadRequest): F[DownloadSummary]

  /** Same as `getSummary` but also submits the job to really create the zip file if
    * allowed and necessary.
    */
  def submit(accountId: AccountId, req: DownloadRequest): F[DownloadSummary]

  /** Given the id from the summary, cancels a running job. */
  def cancelDownload(accountId: AccountId, id: Ident): F[OJob.JobCancelResult]

  /** Returns the file if it is present, given a summary id. */
  def getFile(collective: Ident, id: Ident): F[Option[DownloadAllFile[F]]]

  /** Deletes a download archive given its id. */
  def deleteFile(id: Ident): F[Unit]

  /** Deletes one file at a time going through all older than `ts`. */
  def deleteOlderThan(ts: Timestamp): F[Int]
}

object ODownloadAll {

  def apply[F[_]: Async](
      store: Store[F],
      jobs: OJob[F],
      jobStore: JobStore[F]
  ): ODownloadAll[F] =
    new ODownloadAll[F] {
      private[this] val logger = docspell.logging.getLogger[F]

      def getSummary(
          account: AccountId,
          req: DownloadRequest
      ): F[DownloadSummary] = {
        val query = req.toQuery(account)

        for {
          now <- Timestamp.current[F]
          today = now.toUtcDate
          summary <- store
            .transact(QItem.findFiles(query, req.fileType, today, req.maxFiles + 1, 50))
            .through(DownloadSummary.accumulate)
            .compile
            .lastOrError
          state <- getState(summary, req)
        } yield summary.copy(state = state)
      }

      def submit(
          accountId: AccountId,
          req: DownloadRequest
      ): F[DownloadSummary] = for {
        _ <- logger.info(s"Download all request: $req")
        summary <- getSummary(accountId, req)
        args = DownloadZipArgs(accountId, req)
        _ <- OptionT
          .whenF(summary.state == DownloadState.NotPresent) {
            JobFactory
              .downloadZip(args, summary.id, accountId)
              .flatMap(job =>
                logger.info(s"Submitting download all job: $job") *> jobStore
                  .insertIfNew(job.encode)
              )
          }
          .value
        _ <- OptionT
          .whenF(summary.state != DownloadState.NotPresent)(
            logger.info(s"Not inserting job. State = ${summary.state}")
          )
          .value
        state <- getState(summary, req)
      } yield summary.copy(state = state)

      private def getState(
          summary: DownloadSummary,
          req: DownloadRequest
      ): F[DownloadState] =
        for {
          inDB <- store.transact(RDownloadQuery.existsById(summary.id))
          inQueue <- store.transact(RJob.findNonFinalByTracker(summary.id))
          state =
            if (inDB) DownloadState.Present
            else if (inQueue.isDefined) DownloadState.Preparing
            else if (
              summary.fileCount > req.maxFiles || summary.uncompressedSize > req.maxSize
            ) DownloadState.Forbidden
            else if (summary.fileCount <= 0) DownloadState.Empty
            else DownloadState.NotPresent
        } yield state

      def getFile(collective: Ident, id: Ident) =
        OptionT(store.transact(RDownloadQuery.findById(id)))
          .map(_._2)
          .map(md =>
            DownloadAllFile(id, md.toFileMetadata, store.fileRepo.getBytes(md.id))
          )
          .semiflatTap(_ => store.transact(RDownloadQuery.updateAccessNow(id)))
          .value

      def deleteFile(id: Ident): F[Unit] =
        (for {
          x <- OptionT(store.transact(RDownloadQuery.findById(id)))
          fileKey = x._1.fileId
          _ <- OptionT.liftF(deleteByFileKey(fileKey))
        } yield ())
          .getOrElse(())

      def deleteOlderThan(ts: Timestamp): F[Int] =
        Stream
          .eval(store.transact(RDownloadQuery.findOlderThan(ts, 1)))
          .repeat
          .takeWhile(_.nonEmpty)
          .evalMap(_.traverse(deleteByFileKey))
          .map(_.length)
          .compile
          .foldMonoid

      private def deleteByFileKey(fkey: FileKey): F[Unit] =
        for {
          _ <- logger.info(s"Deleting download archive: $fkey")
          _ <- store.transact(RDownloadQuery.deleteByFileKey(fkey))
          _ <- store.fileRepo.delete(fkey)
        } yield ()

      def cancelDownload(accountId: AccountId, id: Ident) =
        OptionT(store.transact(RDownloadQuery.findById(id)))
          .flatMap(t => OptionT(store.transact(RJob.findNonFinalByTracker(t._1.id))))
          .semiflatMap(job => jobs.cancelJob(job.id, accountId.collective))
          .getOrElse(JobCancelResult.jobNotFound)
    }

  object model {

    final case class DownloadRequest(
        query: ItemQuery,
        fileType: DownloadAllType,
        maxFiles: Int,
        maxSize: ByteSize
    ) {
      def toQuery(accountId: AccountId): Query =
        Query
          .all(accountId)
          .withFix(_.andQuery(ValidItemStates))
          .withCond(_ => Query.QueryExpr(query.expr))

      def itemQueryString =
        ItemQueryParser.asString(query.expr)
    }
    object DownloadRequest {
      import docspell.backend.codec.ItemQueryCodec._

      implicit val jsonDecoder: Decoder[DownloadRequest] =
        deriveDecoder

      implicit val jsonEncoder: Encoder[DownloadRequest] =
        deriveEncoder
    }

    final case class DownloadSummary(
        id: Ident,
        fileCount: Int,
        uncompressedSize: ByteSize,
        state: DownloadState
    )

    object DownloadSummary {
      val empty: DownloadSummary =
        DownloadSummary(
          Ident.unsafe(""),
          0,
          ByteSize.zero,
          DownloadState.Empty
        )

      def accumulate[F[_]]: Pipe[F, RFileMeta, DownloadSummary] =
        in =>
          Stream
            .suspend {
              in.fold((empty, MessageDigest.getInstance("SHA-256"))) {
                case ((summary, digest), meta) =>
                  val next = summary.copy(
                    fileCount = summary.fileCount + 1,
                    uncompressedSize = summary.uncompressedSize + meta.length
                  )
                  digest.update(meta.checksum.toArray)
                  (next, digest)
              }
            }
            .map { case (summary, digest) =>
              val c = ByteVector.view(digest.digest())
              val state =
                if (summary.fileCount > 0) DownloadState.NotPresent
                else DownloadState.Empty
              summary.copy(id = Ident.unsafe(c.toBase58), state = state)
            }
    }

    final case class DownloadAllFile[F[_]](
        id: Ident,
        meta: FileMetadata,
        data: Stream[F, Byte]
    ) extends OItemSearch.BinaryData[F] {
      def name = Some(
        s"docspell-download-${id.id.take(7)}-${meta.created.forFilename}.zip"
      )
      def fileId = meta.id
    }
  }
}
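A rough usage sketch of the new operation, assuming an already wired ODownloadAll[IO]; the query string and limits below are placeholders, not values taken from this change:

import cats.effect.IO
import docspell.backend.ops.ODownloadAll
import docspell.backend.ops.ODownloadAll.model.DownloadRequest
import docspell.common._
import docspell.query.ItemQueryParser

def downloadFlow(ops: ODownloadAll[IO], account: AccountId): IO[Unit] = {
  val query = ItemQueryParser
    .parse("tag:invoice") // hypothetical query string
    .fold(err => sys.error(err.render), identity)
  val req = DownloadRequest(
    query = query,
    fileType = DownloadAllType.Converted,
    maxFiles = 500,
    maxSize = ByteSize.parse("1400M").fold(sys.error, identity)
  )
  for {
    summary <- ops.getSummary(account, req) // prefetch: counts files, checks thresholds
    _ <- ops.submit(account, req)           // enqueues the zip job when state is NotPresent
    file <- ops.getFile(account.collective, summary.id) // None until the job has finished
    _ <- IO.println(file.flatMap(_.name))
  } yield ()
}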
@ -0,0 +1,24 @@ (new file in package docspell.backend.task)
/*
 * Copyright 2020 Eike K. & Contributors
 *
 * SPDX-License-Identifier: AGPL-3.0-or-later
 */

package docspell.backend.task

import docspell.backend.ops.ODownloadAll.model.DownloadRequest
import docspell.common._

import io.circe.generic.semiauto.{deriveDecoder, deriveEncoder}
import io.circe.{Decoder, Encoder}

final case class DownloadZipArgs(accountId: AccountId, req: DownloadRequest)

object DownloadZipArgs {
  val taskName: Ident = Ident.unsafe("download-query-zip")

  implicit val jsonEncoder: Encoder[DownloadZipArgs] =
    deriveEncoder
  implicit val jsonDecoder: Decoder[DownloadZipArgs] =
    deriveDecoder
}
@ -0,0 +1,36 @@ (new file in package docspell.common)
/*
 * Copyright 2020 Eike K. & Contributors
 *
 * SPDX-License-Identifier: AGPL-3.0-or-later
 */

package docspell.common

import cats.data.NonEmptyList

import io.circe.{Decoder, Encoder}

sealed trait DownloadAllType {
  def name: String
}

object DownloadAllType {

  case object Converted extends DownloadAllType { val name = "converted" }
  case object Original extends DownloadAllType { val name = "original" }

  val all: NonEmptyList[DownloadAllType] =
    NonEmptyList.of(Converted, Original)

  def fromString(str: String): Either[String, DownloadAllType] =
    all.find(_.name.equalsIgnoreCase(str)).toRight(s"Unknown type: $str")

  def unsafeFromString(str: String): DownloadAllType =
    fromString(str).fold(sys.error, identity)

  implicit val jsonEncoder: Encoder[DownloadAllType] =
    Encoder.encodeString.contramap(_.name)

  implicit val jsonDecoder: Decoder[DownloadAllType] =
    Decoder.decodeString.emap(fromString)
}
@ -0,0 +1,37 @@ (new file in package docspell.common)
/*
 * Copyright 2020 Eike K. & Contributors
 *
 * SPDX-License-Identifier: AGPL-3.0-or-later
 */

package docspell.common

import cats.data.NonEmptyList

import io.circe.{Decoder, Encoder}

sealed trait DownloadState {
  def name: String
}
object DownloadState {
  case object Forbidden extends DownloadState { val name = "forbidden" }
  case object NotPresent extends DownloadState { val name = "notpresent" }
  case object Preparing extends DownloadState { val name = "preparing" }
  case object Present extends DownloadState { val name = "present" }
  case object Empty extends DownloadState { val name = "empty" }

  val all: NonEmptyList[DownloadState] =
    NonEmptyList.of(Forbidden, NotPresent, Preparing, Present, Empty)

  def fromString(str: String): Either[String, DownloadState] =
    all.find(_.name.equalsIgnoreCase(str)).toRight(s"Unknown download-state: $str")

  def unsafeFromString(str: String): DownloadState =
    fromString(str).fold(sys.error, identity)

  implicit val jsonEncoder: Encoder[DownloadState] =
    Encoder.encodeString.contramap(_.name)

  implicit val jsonDecoder: Decoder[DownloadState] =
    Decoder.decodeString.emap(fromString)
}
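Both new enumerations share the same string-based mapping; a quick illustration using the values defined above:

import docspell.common.{DownloadAllType, DownloadState}

// name lookup is case-insensitive over the listed values
val t = DownloadAllType.fromString("ORIGINAL")      // Right(Original)
val s = DownloadState.unsafeFromString("preparing") // Preparing

// the circe codecs reuse the same names, so JSON carries "original", "preparing", ...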
@ -31,9 +31,16 @@ object FileCategory {
  case object AttachmentConvert extends FileCategory
  case object PreviewImage extends FileCategory
  case object Classifier extends FileCategory
  case object DownloadAll extends FileCategory

  val all: NonEmptyList[FileCategory] =
    NonEmptyList.of(AttachmentSource, AttachmentConvert, PreviewImage, Classifier)
    NonEmptyList.of(
      AttachmentSource,
      AttachmentConvert,
      PreviewImage,
      Classifier,
      DownloadAll
    )

  def fromString(str: String): Either[String, FileCategory] =
    all.find(_.id.id == str).toRight(s"Unknown category: $str")
@ -6,6 +6,7 @@

package docspell.common

import java.time.format.DateTimeFormatter
import java.time.temporal.ChronoUnit
import java.time.{Duration => _, _}

@ -47,6 +48,11 @@ case class Timestamp(value: Instant) {

  def asString: String = value.toString

  def forFilename: String =
    DateTimeFormatter
      .ofPattern("YYYY-MM-dd'T'HH-mm-ss")
      .format(value.atOffset(ZoneOffset.UTC))

  def <(other: Timestamp): Boolean =
    this.value.isBefore(other.value)

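For illustration, forFilename renders the instant with dashes instead of colons so it can be embedded in a file name (it is used for the zip download's file name above); a sketch with an arbitrary instant:

import java.time.Instant
import docspell.common.Timestamp

val ts = Timestamp(Instant.parse("2022-05-04T10:15:30Z"))
ts.forFilename // "2022-05-04T10-15-30"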
@ -104,6 +104,9 @@ object Implicits {
  implicit val ftsTypeReader: ConfigReader[FtsType] =
    ConfigReader[String].emap(reason(FtsType.fromName))

  implicit val byteSizeReader: ConfigReader[ByteSize] =
    ConfigReader[String].emap(reason(ByteSize.parse))

  def reason[T, A: ClassTag](
      f: T => Either[String, A]
  ): T => Either[FailureReason, A] =
@ -7,8 +7,9 @@
package docspell.files

import java.io.InputStream
import java.nio.charset.StandardCharsets
import java.nio.file.Paths
import java.util.zip.ZipInputStream
import java.util.zip.{ZipEntry, ZipInputStream, ZipOutputStream}

import cats.effect._
import cats.implicits._
@ -16,9 +17,16 @@ import fs2.{Pipe, Stream}

import docspell.common.Binary
import docspell.common.Glob
import docspell.logging.Logger

object Zip {

  def zip[F[_]: Async](
      logger: Logger[F],
      chunkSize: Int
  ): Pipe[F, (String, Stream[F, Byte]), Byte] =
    in => zipJava(logger, chunkSize, in.through(deduplicate))

  def unzipP[F[_]: Async](chunkSize: Int, glob: Glob): Pipe[F, Byte, Binary[F]] =
    s => unzip[F](chunkSize, glob)(s)

@ -53,4 +61,63 @@ object Zip {
        Binary(name, data)
      }
  }

  private def deduplicate[F[_]: Sync, A]: Pipe[F, (String, A), (String, A)] = {
    def makeName(name: String, count: Int): String =
      if (count <= 0) name
      else
        name.lastIndexOf('.') match {
          case n if n > 0 =>
            s"${name.substring(0, n)}_$count${name.substring(n)}"
          case _ =>
            s"${name}_$count"
        }

    def unique(
        current: Set[String],
        name: String,
        counter: Int
    ): (Set[String], String) = {
      val nextName = makeName(name, counter)
      if (current.contains(nextName))
        unique(current, name, counter + 1)
      else (current + nextName, nextName)
    }

    in =>
      Stream
        .eval(Ref.of[F, Set[String]](Set.empty[String]))
        .flatMap { ref =>
          in.evalMap { element =>
            ref
              .modify(names => unique(names, element._1, 0))
              .map(n => (n, element._2))
          }
        }
  }

  def zipJava[F[_]: Async](
      logger: Logger[F],
      chunkSize: Int,
      entries: Stream[F, (String, Stream[F, Byte])]
  ): Stream[F, Byte] =
    fs2.io.readOutputStream(chunkSize) { out =>
      val zip = new ZipOutputStream(out, StandardCharsets.UTF_8)
      val writeEntries =
        entries.evalMap { case (name, bytes) =>
          val javaOut =
            bytes.through(
              fs2.io.writeOutputStream[F](Sync[F].pure(zip), closeAfterUse = false)
            )
          val nextEntry =
            logger.debug(s"Adding $name to zip file…") *>
              Sync[F].delay(zip.putNextEntry(new ZipEntry(name)))
          Resource
            .make(nextEntry)(_ => Sync[F].delay(zip.closeEntry()))
            .use(_ => javaOut.compile.drain)
        }
      val closeStream = Sync[F].delay(zip.close())

      writeEntries.onFinalize(closeStream).compile.drain
    }
}
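A minimal sketch of driving the new Zip.zip pipe directly, assuming fs2 3.x file APIs and a docspell Logger[IO] supplied by the caller; names and sizes here are made up:

import cats.effect.IO
import fs2.Stream
import fs2.io.file.{Files, Path}
import docspell.files.Zip
import docspell.logging.Logger

// Writes two in-memory entries into out.zip; a duplicate entry name is
// suffixed (_1, _2, ...) by the deduplicate step inside Zip.zip.
def writeZip(logger: Logger[IO]): IO[Unit] = {
  val entries: Stream[IO, (String, Stream[IO, Byte])] = Stream(
    "a.txt" -> Stream.emits("hello".getBytes.toSeq).covary[IO],
    "a.txt" -> Stream.emits("world".getBytes.toSeq).covary[IO] // becomes a_1.txt
  )
  entries
    .through(Zip.zip(logger, chunkSize = 64 * 1024))
    .through(Files[IO].writeAll(Path("out.zip")))
    .compile
    .drain
}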
@ -187,6 +187,17 @@ docspell.joex {
    delete-batch = "100"
  }

  # Zip files created for downloading multiple files are cached and
  # can be cleared periodically.
  cleanup-downloads = {

    # Whether to enable clearing old download archives.
    enabled = true

    # The minimum age of a download file to be deleted.
    older-than = "14 days"
  }

  # Removes node entries that are not reachable anymore.
  check-nodes {
    # Whether this task is enabled
@ -11,12 +11,14 @@ import cats.effect.{Async, Resource}
import docspell.analysis.TextAnalyser
import docspell.backend.fulltext.CreateIndex
import docspell.backend.ops._
import docspell.backend.task.DownloadZipArgs
import docspell.common._
import docspell.config.FtsType
import docspell.ftsclient.FtsClient
import docspell.ftspsql.PsqlFtsClient
import docspell.ftssolr.SolrFtsClient
import docspell.joex.analysis.RegexNerFile
import docspell.joex.download.DownloadZipTask
import docspell.joex.emptytrash.EmptyTrashTask
import docspell.joex.filecopy.{FileCopyTask, FileIntegrityCheckTask}
import docspell.joex.fts.{MigrationTask, ReIndexTask}
@ -54,8 +56,11 @@ final class JoexTasks[F[_]: Async](
    upload: OUpload[F],
    createIndex: CreateIndex[F],
    joex: OJoex[F],
    jobs: OJob[F],
    itemSearch: OItemSearch[F]
) {
  val downloadAll: ODownloadAll[F] =
    ODownloadAll(store, jobs, jobStoreModule.jobs)

  def get: JobTaskRegistry[F] =
    JobTaskRegistry
@ -105,7 +110,7 @@ final class JoexTasks[F[_]: Async](
      .withTask(
        JobTask.json(
          HouseKeepingTask.taskName,
          HouseKeepingTask[F](cfg, store, fileRepo),
          HouseKeepingTask[F](cfg, store, fileRepo, downloadAll),
          HouseKeepingTask.onCancel[F]
        )
      )
@ -207,6 +212,17 @@ final class JoexTasks[F[_]: Async](
          FileIntegrityCheckTask.onCancel[F]
        )
      )
      .withTask(
        JobTask.json(
          DownloadZipArgs.taskName,
          DownloadZipTask[F](
            cfg.files.chunkSize,
            store,
            ODownloadAll(store, jobs, jobStoreModule.jobs)
          ),
          DownloadZipTask.onCancel[F]
        )
      )
}

object JoexTasks {
@ -233,6 +249,7 @@ object JoexTasks {
      updateCheck <- UpdateCheck.resource(httpClient)
      notification <- ONotification(store, notificationModule)
      fileRepo <- OFileRepository(store, jobStoreModule.jobs)
      jobs <- OJob(store, joex, pubSub)
    } yield new JoexTasks[F](
      cfg,
      store,
@ -248,6 +265,7 @@ object JoexTasks {
      upload,
      createIndex,
      joex,
      jobs,
      itemSearchOps
    )

@ -0,0 +1,101 @@ (new file in package docspell.joex.download)
/*
 * Copyright 2020 Eike K. & Contributors
 *
 * SPDX-License-Identifier: AGPL-3.0-or-later
 */

package docspell.joex.download

import java.time.format.DateTimeFormatter

import cats.effect._
import cats.syntax.all._
import fs2.{Pipe, Stream}

import docspell.backend.ops.ODownloadAll
import docspell.backend.ops.ODownloadAll.model.DownloadSummary
import docspell.backend.task.DownloadZipArgs
import docspell.common._
import docspell.files.Zip
import docspell.scheduler.Task
import docspell.store.Store
import docspell.store.queries.{ItemFileMeta, QItem}
import docspell.store.records.RDownloadQuery

object DownloadZipTask {
  type Args = DownloadZipArgs

  def onCancel[F[_]]: Task[F, Args, Unit] =
    Task.log(_.warn(s"Cancelling ${DownloadZipArgs.taskName.id} task"))

  def apply[F[_]: Async](
      chunkSize: Int,
      store: Store[F],
      downloadOps: ODownloadAll[F]
  ): Task[F, Args, Result] =
    Task { ctx =>
      val req = ctx.args.req
      val query = req.toQuery(ctx.args.accountId)

      val allFiles =
        Stream
          .eval(Timestamp.current[F])
          .flatMap(now =>
            store.transact(
              QItem
                .findFilesDetailed(query, req.fileType, now.toUtcDate, req.maxFiles, 50)
            )
          )
          .through(makeEntries(store))

      val storeZipFile =
        allFiles
          .through(Zip.zip(ctx.logger, chunkSize))
          .through(
            store.fileRepo.save(
              ctx.args.accountId.collective,
              FileCategory.DownloadAll,
              MimeTypeHint.advertised("application/zip")
            )
          )

      for {
        _ <- ctx.logger.info(s"Start zipping ${req.itemQueryString}")
        summary <- downloadOps.getSummary(ctx.args.accountId, req)
        _ <- ctx.logger.debug(s"Summary: $summary")
        file <- storeZipFile.compile.lastOrError
        row <- createRow(summary, ctx.args.accountId.collective, file)
        _ <- ctx.logger.debug(s"Inserting zip file: $row")
        _ <- store.transact(RDownloadQuery.insert(row))
      } yield Result(summary.fileCount)
    }

  def makeEntries[F[_]](
      store: Store[F]
  ): Pipe[F, ItemFileMeta, (String, Stream[F, Byte])] =
    _.map { itemFile =>
      val name = itemFile.fileName.getOrElse(itemFile.name)
      val month =
        DateTimeFormatter
          .ofPattern("YYYY-MM")
          .format(itemFile.date.toUtcDate)
      val entry = itemFile.corrOrg
        .map(_.name)
        .orElse(itemFile.corrPerson.map(_.name))
        .map(_.replace('/', '_'))
        .map(folder => s"$month/$folder/$name")
        .getOrElse(s"$month/$name")

      val bytes = store.fileRepo.getBytes(itemFile.fileMeta.id)
      (entry, bytes)
    }

  def createRow[F[_]: Sync](
      summary: DownloadSummary,
      cid: Ident,
      file: FileKey
  ): F[RDownloadQuery] =
    Timestamp.current[F].map { now =>
      RDownloadQuery(summary.id, cid, file, summary.fileCount, now, None, 0)
    }
}
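For illustration, makeEntries arranges zip entries by month and correspondent; the item below is invented:

// item date 2022-10-08, correspondent "Acme", attachment "invoice.pdf"
//   -> entry "2022-10/Acme/invoice.pdf"
// without a correspondent the folder level is dropped
//   -> entry "2022-10/invoice.pdf"
// '/' inside an organisation or person name is replaced by '_' to keep entries flat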
@ -0,0 +1,24 @@ (new file in package docspell.joex.download)
/*
 * Copyright 2020 Eike K. & Contributors
 *
 * SPDX-License-Identifier: AGPL-3.0-or-later
 */

package docspell.joex.download

import docspell.scheduler.JobTaskResultEncoder

import io.circe.Encoder
import io.circe.generic.semiauto.deriveEncoder

final case class Result(fileCount: Int) {}

object Result {
  implicit val jsonEncoder: Encoder[Result] =
    deriveEncoder

  implicit val jobTaskResultEncoder: JobTaskResultEncoder[Result] =
    JobTaskResultEncoder.fromJson[Result].withMessage { result =>
      s"Zipped ${result.fileCount} files."
    }
}
@ -0,0 +1,34 @@ (new file in package docspell.joex.hk)
/*
 * Copyright 2020 Eike K. & Contributors
 *
 * SPDX-License-Identifier: AGPL-3.0-or-later
 */

package docspell.joex.hk

import cats.effect._
import cats.syntax.all._

import docspell.backend.ops.ODownloadAll
import docspell.common._
import docspell.scheduler._

object CleanupDownloadsTask {
  def apply[F[_]: Sync](
      cfg: HouseKeepingConfig.CleanupDownloads,
      ops: ODownloadAll[F]
  ): Task[F, Unit, CleanupResult] =
    Task { ctx =>
      if (cfg.enabled)
        for {
          now <- Timestamp.current[F]
          ts = now - cfg.olderThan
          _ <- ctx.logger.info(s"Cleanup downloads older than $ts")
          n <- ops.deleteOlderThan(ts)
          _ <- ctx.logger.info(s"Removed $n download archives")
        } yield CleanupResult.of(n)
      else
        ctx.logger.info("CleanupDownloads task is disabled in the configuration") *>
          CleanupResult.disabled.pure[F]
    }
}
@ -16,6 +16,7 @@ case class HouseKeepingConfig(
    cleanupInvites: CleanupInvites,
    cleanupJobs: CleanupJobs,
    cleanupRememberMe: CleanupRememberMe,
    cleanupDownloads: CleanupDownloads,
    checkNodes: CheckNodes,
    integrityCheck: IntegrityCheck
)
@ -26,6 +27,8 @@ object HouseKeepingConfig {

  case class CleanupJobs(enabled: Boolean, olderThan: Duration, deleteBatch: Int)

  case class CleanupDownloads(enabled: Boolean, olderThan: Duration)

  case class CleanupRememberMe(enabled: Boolean, olderThan: Duration)

  case class CheckNodes(enabled: Boolean, minNotFound: Int)
@ -9,7 +9,7 @@ package docspell.joex.hk
import cats.effect._
import cats.implicits._

import docspell.backend.ops.OFileRepository
import docspell.backend.ops.{ODownloadAll, OFileRepository}
import docspell.common._
import docspell.joex.Config
import docspell.joex.filecopy.FileIntegrityCheckTask
@ -29,7 +29,8 @@ object HouseKeepingTask {
  def apply[F[_]: Async](
      cfg: Config,
      store: Store[F],
      fileRepo: OFileRepository[F]
      fileRepo: OFileRepository[F],
      downloadAll: ODownloadAll[F]
  ): Task[F, Unit, Result] = {
    val combined =
      (
@ -37,6 +38,7 @@ object HouseKeepingTask {
        CleanupInvitesTask(cfg.houseKeeping.cleanupInvites, store),
        CleanupJobsTask(cfg.houseKeeping.cleanupJobs, store),
        CleanupRememberMeTask(cfg.houseKeeping.cleanupRememberMe, store),
        CleanupDownloadsTask(cfg.houseKeeping.cleanupDownloads, downloadAll),
        IntegrityCheckTask(cfg.houseKeeping.integrityCheck, store, fileRepo)
      ).mapN(Result.apply)

@ -63,6 +65,7 @@ object HouseKeepingTask {
      cleanupInvites: CleanupResult,
      cleanupJobs: CleanupResult,
      cleanupRememberMe: CleanupResult,
      cleanupDownloads: CleanupResult,
      integrityCheck: FileIntegrityCheckTask.Result
  )

@ -76,6 +79,7 @@ object HouseKeepingTask {
        s"- Invites removed: ${r.cleanupInvites.asString}\n" +
        s"- Jobs removed: ${r.cleanupJobs.asString}\n" +
        s"- RememberMe removed: ${r.cleanupRememberMe.asString}\n" +
        s"- Downloads remove: ${r.cleanupDownloads.asString}\n" +
        s"- Integrity check: ok=${r.integrityCheck.ok}, failed=${r.integrityCheck.failedKeys.size}, notFound=${r.integrityCheck.notFoundKeys.size}"
    }

@ -263,6 +263,7 @@ paths:
|
||||
application/json:
|
||||
schema:
|
||||
$ref: "#/components/schemas/BasicResult"
|
||||
|
||||
/admin/fts/reIndexAll:
|
||||
post:
|
||||
operationId: "admin-fts-reindex-all"
|
||||
@ -333,6 +334,7 @@ paths:
|
||||
application/json:
|
||||
schema:
|
||||
$ref: "#/components/schemas/CheckFileResult"
|
||||
|
||||
/sec/upload/item:
|
||||
post:
|
||||
operationId: "sec-upload-new-item"
|
||||
@ -424,6 +426,130 @@ paths:
|
||||
application/json:
|
||||
schema:
|
||||
$ref: "#/components/schemas/BasicResult"
|
||||
|
||||
/sec/downloadAll/prefetch:
|
||||
post:
|
||||
operationId: "sec-downloadall-prefetch"
|
||||
tags: [Download]
|
||||
summary: Return information about a potential zip download
|
||||
description: |
|
||||
This endpoint calculates the number of files and
|
||||
(uncompressed) size of the zip file that would be created with
|
||||
this request.
|
||||
|
||||
It also checks against configured thresholds and tells whether
|
||||
the server allows requesting a download for this query.
|
||||
security:
|
||||
- authTokenHeader: []
|
||||
requestBody:
|
||||
content:
|
||||
application/json:
|
||||
schema:
|
||||
$ref: "#/components/schemas/DownloadAllRequest"
|
||||
responses:
|
||||
422:
|
||||
description: BadRequest
|
||||
200:
|
||||
description: Ok
|
||||
content:
|
||||
application/json:
|
||||
schema:
|
||||
$ref: "#/components/schemas/DownloadAllSummary"
|
||||
/sec/downloadAll/submit:
|
||||
post:
|
||||
operationId: "sec-downloadall-submit"
|
||||
tags: [Download]
|
||||
summary: Submits a job to create a zip containing all files in the query
|
||||
description: |
|
||||
A job is submitted to create a ZIP file containing all the
|
||||
files that are included in the given query.
|
||||
|
||||
Once the job is done, the returned ID can be used to download
|
||||
the zip file.
|
||||
security:
|
||||
- authTokenHeader: []
|
||||
requestBody:
|
||||
content:
|
||||
application/json:
|
||||
schema:
|
||||
$ref: "#/components/schemas/DownloadAllRequest"
|
||||
responses:
|
||||
422:
|
||||
description: BadRequest
|
||||
200:
|
||||
description: Ok
|
||||
content:
|
||||
application/json:
|
||||
schema:
|
||||
$ref: "#/components/schemas/DownloadAllSummary"
|
||||
|
||||
/sec/downloadAll/cancel/{id}:
|
||||
put:
|
||||
operationId: "sec-downloadall-cancel"
|
||||
tags: [Download]
|
||||
summary: Cancels potentially running jobs to create a download archive
|
||||
description: |
|
||||
If a job is running (created via the `submit` endpoint) to
|
||||
prepare a zip file for download, it is cancelled. The id is
|
||||
the download id as defined in the `prefetch` or `submit`
|
||||
responses.
|
||||
parameters:
|
||||
- $ref: "#/components/parameters/id"
|
||||
security:
|
||||
- authTokenHeader: []
|
||||
responses:
|
||||
422:
|
||||
description: BadRequest
|
||||
200:
|
||||
description: Ok
|
||||
content:
|
||||
application/json:
|
||||
schema:
|
||||
$ref: "#/components/schemas/BasicResult"
|
||||
|
||||
/sec/downloadAll/file/{id}:
|
||||
get:
|
||||
operationId: "sec-downloadall-get-file"
|
||||
tags: [Download]
|
||||
summary: Download the zip file given the id
|
||||
description: |
|
||||
Download the zip file for the given id.
|
||||
parameters:
|
||||
- $ref: "#/components/parameters/id"
|
||||
security:
|
||||
- authTokenHeader: []
|
||||
responses:
|
||||
422:
|
||||
description: BadRequest
|
||||
404:
|
||||
description: NotFound
|
||||
200:
|
||||
description: Ok
|
||||
content:
|
||||
application/octet-stream:
|
||||
schema:
|
||||
type: string
|
||||
format: binary
|
||||
delete:
|
||||
operationId: "sec-downloadall-delete-file"
|
||||
tags: [Download]
|
||||
summary: Deletes the zip file given the id
|
||||
description: |
|
||||
Deletes the zip file for the given id.
|
||||
parameters:
|
||||
- $ref: "#/components/parameters/id"
|
||||
security:
|
||||
- authTokenHeader: []
|
||||
responses:
|
||||
422:
|
||||
description: BadRequest
|
||||
200:
|
||||
description: Ok
|
||||
content:
|
||||
application/json:
|
||||
schema:
|
||||
$ref: "#/components/schemas/BasicResult"
|
||||
|
||||
/open/integration/item/{id}:
|
||||
get:
|
||||
operationId: "open-integration-item-check-collective"
|
||||
@ -2372,6 +2498,92 @@ paths:
|
||||
application/json:
|
||||
schema: {}
|
||||
|
||||
/share/downloadAll/prefetch:
|
||||
post:
|
||||
operationId: "share-downloadall-prefetch"
|
||||
tags: [Download, Share]
|
||||
summary: Return information about a potential zip download
|
||||
description: |
|
||||
This endpoint calculates the number of files and
|
||||
(uncompressed) size of the zip file that would be created with
|
||||
this request.
|
||||
|
||||
It also checks against configured thresholds and tells whether
|
||||
the server allows requesting a download for this query.
|
||||
|
||||
This variant adds the share's query to the request; the `fileType`
property in the request is ignored. It is always fixed to
|
||||
`converted`.
|
||||
security:
|
||||
- shareTokenHeader: []
|
||||
requestBody:
|
||||
content:
|
||||
application/json:
|
||||
schema:
|
||||
$ref: "#/components/schemas/DownloadAllRequest"
|
||||
responses:
|
||||
422:
|
||||
description: BadRequest
|
||||
200:
|
||||
description: Ok
|
||||
content:
|
||||
application/json:
|
||||
schema:
|
||||
$ref: "#/components/schemas/DownloadAllSummary"
|
||||
/share/downloadAll/submit:
|
||||
post:
|
||||
operationId: "share-downloadall-submit"
|
||||
tags: [Download, Share]
|
||||
summary: Submits a job to create a zip containing all files in the query
|
||||
description: |
|
||||
A job is submitted to create a ZIP file containing all the
|
||||
files that are included in the given query.
|
||||
|
||||
Once the job is done, the returned ID can be used to download
|
||||
the zip file.
|
||||
|
||||
This variant adds the share's query to the request; the `fileType`
property in the request is ignored. It is always fixed to
|
||||
`converted`.
|
||||
security:
|
||||
- shareTokenHeader: []
|
||||
requestBody:
|
||||
content:
|
||||
application/json:
|
||||
schema:
|
||||
$ref: "#/components/schemas/DownloadAllRequest"
|
||||
responses:
|
||||
422:
|
||||
description: BadRequest
|
||||
200:
|
||||
description: Ok
|
||||
content:
|
||||
application/json:
|
||||
schema:
|
||||
$ref: "#/components/schemas/DownloadAllSummary"
|
||||
/share/downloadAll/file/{id}:
|
||||
get:
|
||||
operationId: "share-downloadall-get-file"
|
||||
tags: [Download, Share]
|
||||
summary: Download the zip file given the id
|
||||
description: |
|
||||
Download the zip file for the given id.
|
||||
parameters:
|
||||
- $ref: "#/components/parameters/id"
|
||||
security:
|
||||
- shareTokenHeader: []
|
||||
responses:
|
||||
422:
|
||||
description: BadRequest
|
||||
404:
|
||||
description: NotFound
|
||||
200:
|
||||
description: Ok
|
||||
content:
|
||||
application/octet-stream:
|
||||
schema:
|
||||
type: string
|
||||
format: binary
|
||||
|
||||
/admin/user/resetPassword:
|
||||
post:
|
||||
@ -5581,6 +5793,59 @@ paths:
|
||||
|
||||
components:
|
||||
schemas:
|
||||
DownloadAllSummary:
|
||||
description: |
|
||||
Information about a ZIP download.
|
||||
required:
|
||||
- id
|
||||
- fileCount
|
||||
- uncompressedSize
|
||||
- state
|
||||
properties:
|
||||
id:
|
||||
type: string
|
||||
format: ident
|
||||
description: Unique identifier for the download request
|
||||
fileCount:
|
||||
type: integer
|
||||
format: int32
|
||||
description: How many files are included
|
||||
uncompressedSize:
|
||||
type: integer
|
||||
format: bytesize
|
||||
description: The sum of sizes of all included files
|
||||
state:
|
||||
type: string
|
||||
format: downloadstate
|
||||
enum:
|
||||
- forbidden
|
||||
- notpresent
|
||||
- preparing
|
||||
- present
|
||||
- empty
|
||||
description: |
|
||||
A state for the download; it may not exist yet, or it may be
|
||||
forbidden because it exceeds configured thresholds. Then a
|
||||
job may be running to create it or the file is present and
|
||||
ready to download.
|
||||
|
||||
DownloadAllRequest:
|
||||
description: |
|
||||
A request to download all files included in a query.
|
||||
required:
|
||||
- query
|
||||
- fileType
|
||||
properties:
|
||||
query:
|
||||
type: string
|
||||
format: itemquery
|
||||
fileType:
|
||||
type: string
|
||||
format: downloadalltype
|
||||
enum:
|
||||
- converted
|
||||
- original
|
||||
|
||||
ItemLinkData:
|
||||
description: |
|
||||
Data for changing the list of related items.
|
||||
|
@ -90,6 +90,15 @@ docspell.server {
    }
  }

  # Settings for "download as zip"
  download-all {
    # How many files to allow in a zip.
    max-files = 500

    # The maximum (uncompressed) size of the zip file contents.
    max-size = 1400M
  }

  # Configures OpenID Connect (OIDC) or OAuth2 authentication. Only
  # the "Authorization Code Flow" is supported.
  #
@ -14,7 +14,7 @@ import docspell.ftssolr.SolrConfig
import docspell.logging.LogConfig
import docspell.oidc.ProviderConfig
import docspell.pubsub.naive.PubSubConfig
import docspell.restserver.Config.{OpenIdConfig, ServerOptions}
import docspell.restserver.Config.{DownloadAllCfg, OpenIdConfig, ServerOptions}
import docspell.restserver.auth.OpenId
import docspell.restserver.http4s.InternalHeader

@ -36,7 +36,8 @@ case class Config(
    maxNoteLength: Int,
    fullTextSearch: Config.FullTextSearch,
    adminEndpoint: Config.AdminEndpoint,
    openid: List[OpenIdConfig]
    openid: List[OpenIdConfig],
    downloadAll: DownloadAllCfg
) {
  def openIdEnabled: Boolean =
    openid.exists(_.enabled)
@ -51,6 +52,7 @@ case class Config(
}

object Config {
  case class DownloadAllCfg(maxFiles: Int, maxSize: ByteSize)

  case class ServerOptions(
      responseTimeout: Duration,
@ -93,8 +93,10 @@ final class RestAppImpl[F[_]: Async](
      "search" -> ShareSearchRoutes(backend, config, token),
      "attachment" -> ShareAttachmentRoutes(backend, token),
      "item" -> ShareItemRoutes(backend, token),
      "clientSettings" -> ClientSettingsRoutes.share(backend, token)
      "clientSettings" -> ClientSettingsRoutes.share(backend, token),
      "downloadAll" -> DownloadAllRoutes.forShare(config.downloadAll, backend, token)
    )

  def openRoutes(
      client: Client[F]
  ): HttpRoutes[F] =
@ -149,7 +151,8 @@ final class RestAppImpl[F[_]: Async](
      "customfield" -> CustomFieldRoutes(backend, token),
      "clientSettings" -> ClientSettingsRoutes(backend, token),
      "notification" -> NotificationRoutes(config, backend, token),
      "querybookmark" -> BookmarkRoutes(backend, token)
      "querybookmark" -> BookmarkRoutes(backend, token),
      "downloadAll" -> DownloadAllRoutes(config.downloadAll, backend, token)
    )

}
@ -11,7 +11,7 @@ import cats.data.OptionT
import cats.effect._
import cats.implicits._

import docspell.backend.ops.OItemSearch.{AttachmentData, AttachmentPreviewData}
import docspell.backend.ops.OItemSearch.{AttachmentPreviewData, BinaryData}
import docspell.backend.ops._
import docspell.restapi.model.BasicResult
import docspell.restserver.http4s.{QueryParam => QP}
@ -27,7 +27,7 @@ import org.typelevel.ci.CIString
object BinaryUtil {

  def respond[F[_]: Async](dsl: Http4sDsl[F], req: Request[F])(
      fileData: Option[AttachmentData[F]]
      fileData: Option[BinaryData[F]]
  ): F[Response[F]] = {
    import dsl._

@ -42,7 +42,7 @@ object BinaryUtil {
  }

  def respondHead[F[_]: Async](dsl: Http4sDsl[F])(
      fileData: Option[AttachmentData[F]]
      fileData: Option[BinaryData[F]]
  ): F[Response[F]] = {
    import dsl._

@ -0,0 +1,147 @@
|
||||
/*
|
||||
* Copyright 2020 Eike K. & Contributors
|
||||
*
|
||||
* SPDX-License-Identifier: AGPL-3.0-or-later
|
||||
*/
|
||||
|
||||
package docspell.restserver.routes
|
||||
|
||||
import cats.data.{Kleisli, OptionT}
|
||||
import cats.effect._
|
||||
import cats.syntax.all._
|
||||
|
||||
import docspell.backend.BackendApp
|
||||
import docspell.backend.auth.{AuthToken, ShareToken}
|
||||
import docspell.backend.ops.ODownloadAll.model._
|
||||
import docspell.backend.ops.OShare.ShareQuery
|
||||
import docspell.common.{DownloadAllType, Ident}
|
||||
import docspell.joexapi.model.BasicResult
|
||||
import docspell.query.ItemQuery
|
||||
import docspell.restapi.model.{DownloadAllRequest, DownloadAllSummary}
|
||||
import docspell.restserver.Config.DownloadAllCfg
|
||||
import docspell.restserver.conv.Conversions
|
||||
import docspell.restserver.http4s.BinaryUtil
|
||||
|
||||
import org.http4s.circe.CirceEntityCodec._
|
||||
import org.http4s.dsl.Http4sDsl
|
||||
import org.http4s.{HttpRoutes, Request}
|
||||
|
||||
object DownloadAllRoutes {
|
||||
|
||||
def forShare[F[_]: Async](
|
||||
cfg: DownloadAllCfg,
|
||||
backend: BackendApp[F],
|
||||
token: ShareToken
|
||||
): HttpRoutes[F] = {
|
||||
val dsl = new Http4sDsl[F] {}
|
||||
import dsl._
|
||||
|
||||
val find: Kleisli[OptionT[F, *], Request[F], ShareQuery] =
|
||||
Kleisli(_ => backend.share.findShareQuery(token.id))
|
||||
|
||||
find.flatMap { share =>
|
||||
HttpRoutes.of[F] {
|
||||
case req @ POST -> Root / "prefetch" =>
|
||||
for {
|
||||
input <- req.as[DownloadAllRequest]
|
||||
query = ItemQuery.Expr.and(share.query.expr, input.query.expr)
|
||||
result <- backend.downloadAll.getSummary(
|
||||
share.account,
|
||||
DownloadRequest(
|
||||
ItemQuery(query, None),
|
||||
DownloadAllType.Converted,
|
||||
cfg.maxFiles,
|
||||
cfg.maxSize
|
||||
)
|
||||
)
|
||||
resp <- Ok(convertSummary(result))
|
||||
} yield resp
|
||||
|
||||
case req @ POST -> Root / "submit" =>
|
||||
for {
|
||||
input <- req.as[DownloadAllRequest]
|
||||
query = ItemQuery.Expr.and(share.query.expr, input.query.expr)
|
||||
result <- backend.downloadAll.submit(
|
||||
share.account,
|
||||
DownloadRequest(
|
||||
ItemQuery(query, None),
|
||||
DownloadAllType.Converted,
|
||||
cfg.maxFiles,
|
||||
cfg.maxSize
|
||||
)
|
||||
)
|
||||
resp <- Ok(convertSummary(result))
|
||||
} yield resp
|
||||
|
||||
case req @ GET -> Root / "file" / Ident(id) =>
|
||||
for {
|
||||
data <- backend.downloadAll.getFile(share.account.collective, id)
|
||||
resp <- BinaryUtil.respond(dsl, req)(data)
|
||||
} yield resp
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
def apply[F[_]: Async](
|
||||
cfg: DownloadAllCfg,
|
||||
backend: BackendApp[F],
|
||||
token: AuthToken
|
||||
): HttpRoutes[F] = {
|
||||
val dsl = new Http4sDsl[F] {}
|
||||
import dsl._
|
||||
|
||||
HttpRoutes.of {
|
||||
case req @ POST -> Root / "prefetch" =>
|
||||
for {
|
||||
input <- req.as[DownloadAllRequest]
|
||||
result <- backend.downloadAll.getSummary(
|
||||
token.account,
|
||||
DownloadRequest(input.query, input.fileType, cfg.maxFiles, cfg.maxSize)
|
||||
)
|
||||
resp <- Ok(convertSummary(result))
|
||||
} yield resp
|
||||
|
||||
case req @ POST -> Root / "submit" =>
|
||||
for {
|
||||
input <- req.as[DownloadAllRequest]
|
||||
result <- backend.downloadAll.submit(
|
||||
token.account,
|
||||
DownloadRequest(input.query, input.fileType, cfg.maxFiles, cfg.maxSize)
|
||||
)
|
||||
resp <- Ok(convertSummary(result))
|
||||
} yield resp
|
||||
|
||||
case req @ GET -> Root / "file" / Ident(id) =>
|
||||
for {
|
||||
data <- backend.downloadAll.getFile(token.account.collective, id)
|
||||
resp <- BinaryUtil.respond(dsl, req)(data)
|
||||
} yield resp
|
||||
|
||||
case HEAD -> Root / "file" / Ident(id) =>
|
||||
for {
|
||||
data <- backend.downloadAll.getFile(token.account.collective, id)
|
||||
resp <- BinaryUtil.respondHead(dsl)(data)
|
||||
} yield resp
|
||||
|
||||
case DELETE -> Root / "file" / Ident(id) =>
|
||||
for {
|
||||
_ <- backend.downloadAll.deleteFile(id)
|
||||
resp <- Ok(BasicResult(true, "File deleted."))
|
||||
} yield resp
|
||||
|
||||
case PUT -> Root / "cancel" / Ident(id) =>
|
||||
for {
|
||||
res <- backend.downloadAll.cancelDownload(token.account, id)
|
||||
resp <- Ok(Conversions.basicResult(res))
|
||||
} yield resp
|
||||
}
|
||||
}
|
||||
|
||||
private def convertSummary(result: DownloadSummary): DownloadAllSummary =
|
||||
DownloadAllSummary(
|
||||
id = result.id,
|
||||
fileCount = result.fileCount,
|
||||
uncompressedSize = result.uncompressedSize,
|
||||
state = result.state
|
||||
)
|
||||
}
|
@ -7,7 +7,7 @@
|
||||
package docspell.restserver.webapp
|
||||
|
||||
import docspell.backend.signup.{Config => SignupConfig}
|
||||
import docspell.common.{Ident, LenientUri}
|
||||
import docspell.common.{ByteSize, Ident, LenientUri}
|
||||
import docspell.restserver.{BuildInfo, Config}
|
||||
|
||||
import io.circe._
|
||||
@ -26,6 +26,8 @@ case class Flags(
|
||||
maxPageSize: Int,
|
||||
maxNoteLength: Int,
|
||||
showClassificationSettings: Boolean,
|
||||
downloadAllMaxFiles: Int,
|
||||
downloadAllMaxSize: ByteSize,
|
||||
uiVersion: Int,
|
||||
openIdAuth: List[Flags.OpenIdAuth]
|
||||
)
|
||||
@ -42,6 +44,8 @@ object Flags {
|
||||
cfg.maxItemPageSize,
|
||||
cfg.maxNoteLength,
|
||||
cfg.showClassificationSettings,
|
||||
cfg.downloadAll.maxFiles,
|
||||
cfg.downloadAll.maxSize,
|
||||
uiVersion,
|
||||
cfg.openid.filter(_.enabled).map(c => OpenIdAuth(c.provider.providerId, c.display))
|
||||
)
|
||||
@ -63,6 +67,9 @@ object Flags {
|
||||
implicit val jsonEncoder: Encoder[Flags] =
|
||||
deriveEncoder[Flags]
|
||||
|
||||
implicit def yamuscaByteSizeConverter: ValueConverter[ByteSize] =
|
||||
ValueConverter.of(sz => Value.fromString(sz.bytes.toString))
|
||||
|
||||
implicit def yamuscaIdentConverter: ValueConverter[Ident] =
|
||||
ValueConverter.of(id => Value.fromString(id.id))
|
||||
implicit def yamuscaOpenIdAuthConverter: ValueConverter[OpenIdAuth] =
|
||||
|
@ -18,7 +18,8 @@ trait JobStore[F[_]] {
  /** Inserts the job into the queue only, if there is no job with the same tracker-id
    * running at the moment. The job id must be a new unique id.
    *
    * If the job has no tracker defined, it is simply inserted.
    * If the job has no tracker defined, it is simply inserted. It returns `true` if the
    * job was inserted.
    */
  def insertIfNew(job: Job[String]): F[Boolean]

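This tracker-based dedup is what keeps repeated submit calls from piling up zip jobs: the download summary id is used as the job tracker (see JobFactory.downloadZip above), so a second insert while the first job is still running returns false. A hedged sketch:

import cats.effect.IO
import docspell.scheduler.{Job, JobStore}

// job1 and job2 are assumed to carry the same tracker id (the summary id) but new job ids
def submitTwice(store: JobStore[IO], job1: Job[String], job2: Job[String]): IO[(Boolean, Boolean)] =
  for {
    first <- store.insertIfNew(job1)  // true: no non-final job with this tracker yet
    second <- store.insertIfNew(job2) // false while the first job has not finished
  } yield (first, second)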
@ -0,0 +1,11 @@ (new migration creating the download_query table)
CREATE TABLE "download_query"(
  "id" varchar(254) not null primary key,
  "cid" varchar(254) not null,
  "file_id" varchar(254) not null,
  "file_count" int not null,
  "created" timestamp not null,
  "last_access" timestamp,
  "access_count" int not null,
  foreign key ("cid") references "collective"("cid"),
  foreign key ("file_id") references "filemeta"("file_id")
);
@ -0,0 +1,11 @@ (new migration creating the download_query table, MariaDB variant)
CREATE TABLE `download_query`(
  `id` varchar(254) not null primary key,
  `cid` varchar(254) not null,
  `file_id` varchar(254) not null,
  `file_count` int not null,
  `created` timestamp not null,
  `last_access` timestamp,
  `access_count` int not null,
  foreign key (`cid`) references `collective`(`cid`),
  foreign key (`file_id`) references `filemeta`(`file_id`)
);
@ -0,0 +1,11 @@ (new migration creating the download_query table)
CREATE TABLE "download_query"(
  "id" varchar(254) not null primary key,
  "cid" varchar(254) not null,
  "file_id" varchar(254) not null,
  "file_count" int not null,
  "created" timestamp not null,
  "last_access" timestamp,
  "access_count" int not null,
  foreign key ("cid") references "collective"("cid"),
  foreign key ("file_id") references "filemeta"("file_id")
);
@ -47,6 +47,12 @@ trait DSL extends DoobieMeta {
  def select(e: SelectExpr, es: SelectExpr*): Nel[SelectExpr] =
    Nel(e, es.toList)

  def combineNel[A](e: Nel[A], more: Nel[A]*): Nel[A] =
    Nel
      .fromFoldable(more)
      .map(tail => tail.prepend(e).flatMap(identity))
      .getOrElse(e)

  def select(c: Column[_], cs: Column[_]*): Nel[SelectExpr] =
    Nel(c, cs.toList).map(col => SelectExpr.SelectColumn(col, None))

@ -0,0 +1,29 @@ (new file in package docspell.store.queries)
/*
 * Copyright 2020 Eike K. & Contributors
 *
 * SPDX-License-Identifier: AGPL-3.0-or-later
 */

package docspell.store.queries

import docspell.common._
import docspell.store.records.RFileMeta

/** Almost like [[ListItem]] but without notes and at file level. */
final case class ItemFileMeta(
    id: Ident,
    name: String,
    state: ItemState,
    date: Timestamp,
    dueDate: Option[Timestamp],
    source: String,
    direction: Direction,
    created: Timestamp,
    corrOrg: Option[IdRef],
    corrPerson: Option[IdRef],
    concPerson: Option[IdRef],
    concEquip: Option[IdRef],
    folder: Option[IdRef],
    fileName: Option[String],
    fileMeta: RFileMeta
)
@ -38,9 +38,11 @@ object QItem {
|
||||
private val cf = RCustomField.as("cf")
|
||||
private val cv = RCustomFieldValue.as("cvf")
|
||||
private val a = RAttachment.as("a")
|
||||
private val as = RAttachmentSource.as("ras")
|
||||
private val m = RAttachmentMeta.as("m")
|
||||
private val tag = RTag.as("t")
|
||||
private val ti = RTagItem.as("ti")
|
||||
private val meta = RFileMeta.as("fmeta")
|
||||
|
||||
def countAttachmentsAndItems(items: Nel[Ident]): ConnectionIO[Int] =
|
||||
Select(count(a.id).s, from(a), a.itemId.in(items)).build
|
||||
@ -176,6 +178,87 @@ object QItem {
|
||||
)
|
||||
}
|
||||
|
||||
private def findFilesQuery(
|
||||
q: Query,
|
||||
ftype: DownloadAllType,
|
||||
today: LocalDate,
|
||||
maxFiles: Int
|
||||
): Select =
|
||||
findItemsBase(q.fix, today, 0)
|
||||
.changeFrom(_.innerJoin(a, a.itemId === i.id).innerJoin(as, a.id === as.id))
|
||||
.changeFrom(from =>
|
||||
ftype match {
|
||||
case DownloadAllType.Converted =>
|
||||
from.innerJoin(meta, meta.id === a.fileId)
|
||||
case DownloadAllType.Original =>
|
||||
from.innerJoin(meta, meta.id === as.fileId)
|
||||
}
|
||||
)
|
||||
.changeWhere(c => c && queryCondition(today, q.fix.account.collective, q.cond))
|
||||
.limit(maxFiles)
|
||||
|
||||
def findFiles(
|
||||
q: Query,
|
||||
ftype: DownloadAllType,
|
||||
today: LocalDate,
|
||||
maxFiles: Int,
|
||||
chunkSize: Int
|
||||
): Stream[ConnectionIO, RFileMeta] = {
|
||||
val query = findFilesQuery(q, ftype, today, maxFiles)
|
||||
.withSelect(
|
||||
meta.all.map(_.s).append(coalesce(i.itemDate.s, i.created.s).s)
|
||||
)
|
||||
|
||||
query.build
|
||||
.query[RFileMeta]
|
||||
.streamWithChunkSize(chunkSize)
|
||||
}
|
||||
|
||||
def findFilesDetailed(
|
||||
q: Query,
|
||||
ftype: DownloadAllType,
|
||||
today: LocalDate,
|
||||
maxFiles: Int,
|
||||
chunkSize: Int
|
||||
): Stream[ConnectionIO, ItemFileMeta] = {
|
||||
val fname = ftype match {
|
||||
case DownloadAllType.Converted => a.name
|
||||
case DownloadAllType.Original => as.name
|
||||
}
|
||||
|
||||
val query = findFilesQuery(q, ftype, today, maxFiles)
|
||||
.withSelect(
|
||||
combineNel(
|
||||
select(
|
||||
i.id.s,
|
||||
i.name.s,
|
||||
i.state.s,
|
||||
coalesce(i.itemDate.s, i.created.s).s,
|
||||
i.dueDate.s,
|
||||
i.source.s,
|
||||
i.incoming.s,
|
||||
i.created.s,
|
||||
org.oid.s,
|
||||
org.name.s,
|
||||
pers0.pid.s,
|
||||
pers0.name.s,
|
||||
pers1.pid.s,
|
||||
pers1.name.s,
|
||||
equip.eid.s,
|
||||
equip.name.s,
|
||||
f.id.s,
|
||||
f.name.s
|
||||
),
|
||||
select(fname.s),
|
||||
select(meta.all)
|
||||
)
|
||||
)
|
||||
|
||||
query.build
|
||||
.query[ItemFileMeta]
|
||||
.streamWithChunkSize(chunkSize)
|
||||
}
|
||||
|
||||
def queryCondFromExpr(today: LocalDate, coll: Ident, q: ItemQuery.Expr): Condition = {
|
||||
val tables = Tables(i, org, pers0, pers1, equip, f, a, m, AttachCountTable("cta"))
|
||||
ItemQueryGenerator.fromExpr(today, tables, coll)(q)
|
||||
|
@ -0,0 +1,102 @@
/*
 * Copyright 2020 Eike K. & Contributors
 *
 * SPDX-License-Identifier: AGPL-3.0-or-later
 */

package docspell.store.records

import cats.data.NonEmptyList

import docspell.common._
import docspell.store.qb.DSL._
import docspell.store.qb._

import doobie._
import doobie.implicits._

final case class RDownloadQuery(
    id: Ident,
    cid: Ident,
    fileId: FileKey,
    fileCount: Int,
    created: Timestamp,
    lastAccess: Option[Timestamp],
    accessCount: Int
) {}

object RDownloadQuery {

  case class Table(alias: Option[String]) extends TableDef {
    val tableName = "download_query"

    val id: Column[Ident] = Column("id", this)
    val cid: Column[Ident] = Column("cid", this)
    val fileId: Column[FileKey] = Column("file_id", this)
    val fileCount: Column[Int] = Column("file_count", this)
    val created: Column[Timestamp] = Column("created", this)
    val lastAccess: Column[Timestamp] = Column("last_access", this)
    val accessCount: Column[Int] = Column("access_count", this)

    val all: NonEmptyList[Column[_]] =
      NonEmptyList.of(id, cid, fileId, fileCount, created, lastAccess, accessCount)
  }

  def as(alias: String): Table =
    Table(Some(alias))

  val T = Table(None)

  def insert(r: RDownloadQuery): ConnectionIO[Int] =
    DML.insert(
      T,
      T.all,
      sql"${r.id},${r.cid},${r.fileId},${r.fileCount},${r.created},${r.lastAccess},${r.accessCount}"
    )

  def existsById(id: Ident): ConnectionIO[Boolean] =
    Select(select(count(T.id)), from(T), T.id === id).build.query[Int].unique.map(_ > 0)

  def findById(id: Ident): ConnectionIO[Option[(RDownloadQuery, RFileMeta)]] = {
    val dq = RDownloadQuery.as("dq")
    val fm = RFileMeta.as("fm")
    Select(
      select(dq.all, fm.all),
      from(dq).innerJoin(fm, fm.id === dq.fileId),
      dq.id === id
    ).build
      .query[(RDownloadQuery, RFileMeta)]
      .option
  }

  def updateAccess(id: Ident, ts: Timestamp): ConnectionIO[Int] =
    DML.update(
      T,
      T.id === id,
      DML.set(
        T.lastAccess.setTo(ts),
        T.accessCount.increment(1)
      )
    )

  def updateAccessNow(id: Ident): ConnectionIO[Int] =
    Timestamp
      .current[ConnectionIO]
      .flatMap(updateAccess(id, _))

  def deleteById(id: Ident): ConnectionIO[Int] =
    DML.delete(T, T.id === id)

  def deleteByFileKey(fkey: FileKey): ConnectionIO[Int] =
    DML.delete(T, T.fileId === fkey)
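
  // Returns file keys of download archives whose last access is older than `ts`
  // (or that were never accessed); presumably consumed by a periodic cleanup job
  // that removes stale zip files.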
  def findOlderThan(ts: Timestamp, batch: Int): ConnectionIO[List[FileKey]] =
    Select(
      select(T.fileId),
      from(T),
      T.lastAccess.isNull || T.lastAccess < ts
    ).limit(batch)
      .build
      .query[FileKey]
      .to[List]
}
@ -11,7 +11,7 @@ import cats.implicits._
import fs2.Stream

import docspell.common.{FileKey, _}
import docspell.store.file.BinnyUtils
import docspell.store.file.{BinnyUtils, FileMetadata}
import docspell.store.qb.DSL._
import docspell.store.qb._

@ -25,7 +25,10 @@ final case class RFileMeta(
    mimetype: MimeType,
    length: ByteSize,
    checksum: ByteVector
)
) {
  def toFileMetadata: FileMetadata =
    FileMetadata(id, created, mimetype, length, checksum)
}

object RFileMeta {
  final case class Table(alias: Option[String]) extends TableDef {
@ -60,6 +60,9 @@ module Api exposing
    , deleteTag
    , deleteUser
    , disableOtp
    , downloadAllLink
    , downloadAllPrefetch
    , downloadAllSubmit
    , fileURL
    , getAllDashboards
    , getAttachmentMeta
@ -175,6 +178,9 @@ module Api exposing
    , setTagsMultiple
    , setUnconfirmed
    , shareAttachmentPreviewURL
    , shareDownloadAllLink
    , shareDownloadAllPrefetch
    , shareDownloadAllSubmit
    , shareFileURL
    , shareItemBasePreviewURL
    , shareSendMail
@ -218,6 +224,8 @@ import Api.Model.CustomFieldList exposing (CustomFieldList)
import Api.Model.CustomFieldValue exposing (CustomFieldValue)
import Api.Model.DeleteUserData exposing (DeleteUserData)
import Api.Model.DirectionValue exposing (DirectionValue)
import Api.Model.DownloadAllRequest exposing (DownloadAllRequest)
import Api.Model.DownloadAllSummary exposing (DownloadAllSummary)
import Api.Model.EmailSettings exposing (EmailSettings)
import Api.Model.EmailSettingsList exposing (EmailSettingsList)
import Api.Model.EmptyTrashSetting exposing (EmptyTrashSetting)
@ -3084,6 +3092,70 @@ removeRelatedItem flags item1 item2 receive =



--- DownloadAll


downloadAllPrefetch : Flags -> DownloadAllRequest -> (Result Http.Error DownloadAllSummary -> msg) -> Cmd msg
downloadAllPrefetch flags req receive =
    Http2.authPost
        { url = flags.config.baseUrl ++ "/api/v1/sec/downloadAll/prefetch"
        , account = getAccount flags
        , body = Http.jsonBody (Api.Model.DownloadAllRequest.encode req)
        , expect = Http.expectJson receive Api.Model.DownloadAllSummary.decoder
        }


downloadAllSubmit : Flags -> DownloadAllRequest -> (Result Http.Error DownloadAllSummary -> msg) -> Cmd msg
downloadAllSubmit flags req receive =
    Http2.authPost
        { url = flags.config.baseUrl ++ "/api/v1/sec/downloadAll/submit"
        , account = getAccount flags
        , body = Http.jsonBody (Api.Model.DownloadAllRequest.encode req)
        , expect = Http.expectJson receive Api.Model.DownloadAllSummary.decoder
        }


downloadAllLink : Flags -> String -> String
downloadAllLink flags id =
    flags.config.baseUrl ++ "/api/v1/sec/downloadAll/file/" ++ id


shareDownloadAllPrefetch :
    Flags
    -> String
    -> DownloadAllRequest
    -> (Result Http.Error DownloadAllSummary -> msg)
    -> Cmd msg
shareDownloadAllPrefetch flags token req receive =
    Http2.sharePost
        { url = flags.config.baseUrl ++ "/api/v1/share/downloadAll/prefetch"
        , token = token
        , body = Http.jsonBody (Api.Model.DownloadAllRequest.encode req)
        , expect = Http.expectJson receive Api.Model.DownloadAllSummary.decoder
        }


shareDownloadAllSubmit :
    Flags
    -> String
    -> DownloadAllRequest
    -> (Result Http.Error DownloadAllSummary -> msg)
    -> Cmd msg
shareDownloadAllSubmit flags token req receive =
    Http2.sharePost
        { url = flags.config.baseUrl ++ "/api/v1/share/downloadAll/submit"
        , token = token
        , body = Http.jsonBody (Api.Model.DownloadAllRequest.encode req)
        , expect = Http.expectJson receive Api.Model.DownloadAllSummary.decoder
        }


shareDownloadAllLink : Flags -> String -> String
shareDownloadAllLink flags id =
    flags.config.baseUrl ++ "/api/v1/share/downloadAll/file/" ++ id



--- Helper
@ -14,6 +14,7 @@ import Api
import App.Data exposing (..)
import Browser exposing (UrlRequest(..))
import Browser.Navigation as Nav
import Comp.DownloadAll
import Data.AppEvent exposing (AppEvent(..))
import Data.Environment as Env
import Data.Flags
@ -317,6 +318,9 @@ updateWithSub msg model =
                isProcessItem =
                    task == "process-item"

                isDownloadZip =
                    task == "download-query-zip"

                newModel =
                    { model
                        | showNewItemsArrived = isProcessItem && not (Page.isSearchPage model.page)
@ -326,6 +330,9 @@ updateWithSub msg model =
            if Page.isSearchPage model.page && isProcessItem then
                updateSearch texts Page.Search.Data.RefreshView newModel

            else if Page.isSearchPage model.page && isDownloadZip then
                updateSearch texts (Page.Search.Data.DownloadAllMsg Comp.DownloadAll.checkDownload) newModel

            else if Page.isDashboardPage model.page && isProcessItem then
                updateDashboard texts Page.Dashboard.Data.reloadDashboardData newModel
349
modules/webapp/src/main/elm/Comp/DownloadAll.elm
Normal file
@ -0,0 +1,349 @@
{-
   Copyright 2020 Eike K. & Contributors

   SPDX-License-Identifier: AGPL-3.0-or-later
-}


module Comp.DownloadAll exposing (AccessMode(..), Model, Msg, UpdateResult, checkDownload, init, isPreparing, update, view)

import Api
import Api.Model.DownloadAllRequest exposing (DownloadAllRequest)
import Api.Model.DownloadAllSummary exposing (DownloadAllSummary)
import Comp.Basic as B
import Comp.FixedDropdown
import Data.DownloadAllState
import Data.DownloadFileType exposing (DownloadFileType)
import Data.DropdownStyle as DS
import Data.Flags exposing (Flags)
import Html exposing (Html, a, div, i, label, text)
import Html.Attributes exposing (class, classList, disabled, href)
import Html.Events exposing (onClick)
import Http
import Messages.Comp.DownloadAll exposing (Texts)
import Styles as S
import Util.Size


type alias Model =
    { summary : DownloadAllSummary
    , query : String
    , dlType : DownloadFileType
    , dlTypeDropdown : Comp.FixedDropdown.Model DownloadFileType
    , loading : Bool
    , formError : FormError
    , accessMode : AccessMode
    }


type AccessMode
    = AccessShare String
    | AccessUser


type FormError
    = FormNone
    | FormHttpError Http.Error


init : AccessMode -> Flags -> String -> ( Model, Cmd Msg )
init am flags query =
    let
        model =
            { summary = Api.Model.DownloadAllSummary.empty
            , query = query
            , dlType = Data.DownloadFileType.Converted
            , dlTypeDropdown = Comp.FixedDropdown.init Data.DownloadFileType.all
            , formError = FormNone
            , accessMode = am
            , loading = False
            }
    in
    ( model
    , prefetch flags model
    )


type Msg
    = DownloadSummaryResp (Result Http.Error DownloadAllSummary)
    | DlTypeMsg (Comp.FixedDropdown.Msg DownloadFileType)
    | CloseAction
    | SubmitAction
    | CheckAction


checkDownload : Msg
checkDownload =
    CheckAction


isPreparing : Model -> Bool
isPreparing model =
    Data.DownloadAllState.fromString model.summary.state == Just Data.DownloadAllState.Preparing


makeRequest : Model -> DownloadAllRequest
makeRequest model =
    { query = model.query
    , fileType = Data.DownloadFileType.asString model.dlType
    }



--- Update


type alias UpdateResult =
    { model : Model
    , cmd : Cmd Msg
    , closed : Bool
    }


update : Flags -> Msg -> Model -> UpdateResult
update flags msg model =
    case msg of
        DownloadSummaryResp (Ok summary) ->
            unit { model | summary = summary, formError = FormNone, loading = False }

        DownloadSummaryResp (Err err) ->
            unit { model | formError = FormHttpError err, loading = False }

        DlTypeMsg lm ->
            let
                ( dlm, sel ) =
                    Comp.FixedDropdown.update lm model.dlTypeDropdown

                nextDlType =
                    Maybe.withDefault model.dlType sel

                nextModel =
                    { model
                        | dlTypeDropdown = dlm
                        , dlType = nextDlType
                        , formError = FormNone
                    }
            in
            if nextDlType /= model.dlType && sel /= Nothing then
                unitCmd
                    ( { nextModel | loading = True }
                    , prefetch flags nextModel
                    )

            else
                unit { model | dlTypeDropdown = dlm }

        CloseAction ->
            UpdateResult model Cmd.none True

        SubmitAction ->
            unitCmd
                ( model
                , submit flags model
                )

        CheckAction ->
            unitCmd
                ( model
                , prefetch flags model
                )


unit : Model -> UpdateResult
unit model =
    UpdateResult model Cmd.none False


unitCmd : ( Model, Cmd Msg ) -> UpdateResult
unitCmd ( m, c ) =
    UpdateResult m c False
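

-- `prefetch` appears to only fetch the current summary (file count, size, state)
-- for the query, while `submit` asks the server to actually build the zip file;
-- both reply with a DownloadAllSummary.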
prefetch : Flags -> Model -> Cmd Msg
prefetch flags model =
    case model.accessMode of
        AccessUser ->
            Api.downloadAllPrefetch flags (makeRequest model) DownloadSummaryResp

        AccessShare shareId ->
            Api.shareDownloadAllPrefetch flags shareId (makeRequest model) DownloadSummaryResp


submit : Flags -> Model -> Cmd Msg
submit flags model =
    case model.accessMode of
        AccessUser ->
            Api.downloadAllSubmit flags (makeRequest model) DownloadSummaryResp

        AccessShare shareId ->
            Api.shareDownloadAllSubmit flags shareId (makeRequest model) DownloadSummaryResp


downloadLink : Flags -> Model -> String
downloadLink flags model =
    case model.accessMode of
        AccessUser ->
            Api.downloadAllLink flags model.summary.id

        AccessShare _ ->
            Api.shareDownloadAllLink flags model.summary.id



--- View


view : Flags -> Texts -> Model -> Html Msg
view flags texts model =
    let
        dlTypeSettings =
            { display = texts.downloadFileType
            , icon = \_ -> Nothing
            , selectPlaceholder = ""
            , style = DS.mainStyle
            }

        byteStr n =
            Util.Size.bytesReadable Util.Size.B (toFloat n)
    in
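    -- the state is unknown until the first summary response arrives; the
    -- `Nothing` branch below renders a pulsing placeholder in the meantime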
    case Data.DownloadAllState.fromString model.summary.state of
        Nothing ->
            div [ class "flex flex-col animate-pulse space-y-4 px-2 my-2" ]
                [ div [ class "h-2 border dark:border-slate-600 bg-gray-100 dark:bg-slate-600" ]
                    []
                , div [ class "h-2 border dark:border-slate-600 bg-gray-100 dark:bg-slate-600" ]
                    []
                , div [ class "h-8 border dark:border-slate-600 bg-gray-100 dark:bg-slate-600" ]
                    []
                , div [ class "flex flex-row space-x-4 " ]
                    [ div [ class "h-10 w-32 dark:border-slate-600 bg-gray-100 dark:bg-slate-600" ]
                        []
                    , div [ class "h-10 w-32 dark:border-slate-600 bg-gray-100 dark:bg-slate-600" ]
                        []
                    ]
                ]

        Just Data.DownloadAllState.Empty ->
            div
                [ class "flex flex-col relative px-2"
                ]
                [ div
                    [ class S.infoMessage
                    ]
                    [ text texts.noResults
                    ]
                , div [ class "flex flex-row py-2" ]
                    [ a
                        [ class S.secondaryButton
                        , href "#"
                        , onClick CloseAction
                        ]
                        [ i [ class "fa fa-times mr-2" ] []
                        , text texts.close
                        ]
                    ]
                ]

        Just state ->
            div [ class "flex flex-col relative px-2" ]
                [ B.loadingDimmer
                    { active = state == Data.DownloadAllState.Preparing
                    , label = texts.downloadPreparing
                    }
                , div
                    [ classList [ ( "hidden", state == Data.DownloadAllState.Forbidden ) ]
                    ]
                    [ text
                        (texts.summary
                            model.summary.fileCount
                            (byteStr model.summary.uncompressedSize)
                        )
                    ]
                , div
                    [ classList [ ( "hidden", state /= Data.DownloadAllState.Forbidden ) ]
                    , class S.errorMessage
                    ]
                    [ text texts.downloadTooLarge
                    , text " "
                    , text <|
                        texts.downloadConfigText
                            flags.config.downloadAllMaxFiles
                            flags.config.downloadAllMaxSize
                            model.summary.uncompressedSize
                    ]
                , div
                    [ class "mt-3"
                    , classList [ ( "hidden", model.accessMode /= AccessUser ) ]
                    ]
                    [ label [ class S.inputLabel ]
                        [ text texts.downloadFileTypeLabel
                        ]
                    , Html.map DlTypeMsg
                        (Comp.FixedDropdown.viewStyled2
                            dlTypeSettings
                            False
                            (Just model.dlType)
                            model.dlTypeDropdown
                        )
                    ]
                , div
                    [ class "my-2"
                    , classList [ ( "hidden", state /= Data.DownloadAllState.Present ) ]
                    ]
                    [ text texts.downloadReady
                    ]
                , div
                    [ class "my-2 "
                    , classList [ ( "hidden", state /= Data.DownloadAllState.NotPresent ) ]
                    ]
                    [ text texts.downloadCreateText
                    ]
                , div [ class "flex flex-row py-2 items-center" ]
                    [ a
                        [ class S.primaryButton
                        , disabled (state /= Data.DownloadAllState.NotPresent && state /= Data.DownloadAllState.Present)
                        , classList [ ( "disabled", state /= Data.DownloadAllState.NotPresent && state /= Data.DownloadAllState.Present ) ]
                        , if state == Data.DownloadAllState.Present then
                            href (downloadLink flags model)

                          else
                            href "#"
                        , if state == Data.DownloadAllState.NotPresent then
                            onClick SubmitAction

                          else
                            class ""
                        ]
                        [ case state of
                            Data.DownloadAllState.Present ->
                                text texts.downloadNow

                            Data.DownloadAllState.NotPresent ->
                                text texts.downloadCreate

                            Data.DownloadAllState.Preparing ->
                                text texts.downloadPreparing

                            Data.DownloadAllState.Forbidden ->
                                text "N./A."

                            Data.DownloadAllState.Empty ->
                                text "N./A."
                        ]
                    , a
                        [ class S.secondaryButton
                        , class "ml-2"
                        , href "#"
                        , onClick CloseAction
                        ]
                        [ i [ class "fa fa-times mr-2" ] []
                        , text texts.close
                        ]
                    , div
                        [ class "h-full ml-3"
                        , classList [ ( "hidden", not model.loading ) ]
                        ]
                        [ i [ class "fa fa-circle-notch animate-spin" ] []
                        ]
                    ]
                ]
49
modules/webapp/src/main/elm/Data/DownloadAllState.elm
Normal file
@ -0,0 +1,49 @@
{-
   Copyright 2020 Eike K. & Contributors

   SPDX-License-Identifier: AGPL-3.0-or-later
-}


module Data.DownloadAllState exposing (DownloadAllState(..), all, asString, fromString)


type DownloadAllState
    = NotPresent
    | Forbidden
    | Empty
    | Preparing
    | Present
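      -- states reported in DownloadAllSummary.state, as rendered by Comp.DownloadAll:
      -- NotPresent = zip not yet created, Preparing = job running, Present = ready to
      -- download, Forbidden = exceeds the configured limits, Empty = no files matched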


all : List DownloadAllState
all =
    [ NotPresent, Forbidden, Empty, Preparing, Present ]


asString : DownloadAllState -> String
asString st =
    case st of
        NotPresent ->
            "notpresent"

        Forbidden ->
            "forbidden"

        Empty ->
            "empty"

        Preparing ->
            "preparing"

        Present ->
            "present"


fromString : String -> Maybe DownloadAllState
fromString str =
    let
        name =
            String.toLower str
    in
    List.filter (\e -> asString e == name) all |> List.head
41
modules/webapp/src/main/elm/Data/DownloadFileType.elm
Normal file
@ -0,0 +1,41 @@
{-
   Copyright 2020 Eike K. & Contributors

   SPDX-License-Identifier: AGPL-3.0-or-later
-}


module Data.DownloadFileType exposing (DownloadFileType(..), all, asString, fromString)


type DownloadFileType
    = Converted
    | Originals
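      -- Converted = the PDF files produced by processing, Originals = the files as
      -- uploaded (cf. Messages.Data.DownloadFileType)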


all : List DownloadFileType
all =
    [ Converted, Originals ]


asString : DownloadFileType -> String
asString ft =
    case ft of
        Converted ->
            "converted"

        Originals ->
            "original"


fromString : String -> Maybe DownloadFileType
fromString str =
    case String.toLower str of
        "converted" ->
            Just Converted

        "originals" ->
            Just Originals

        _ ->
            Nothing
@ -35,6 +35,8 @@ type alias Config =
    , maxPageSize : Int
    , maxNoteLength : Int
    , showClassificationSettings : Bool
    , downloadAllMaxFiles : Int
    , downloadAllMaxSize : Int
    , openIdAuth : List OpenIdAuth
    }
113
modules/webapp/src/main/elm/Messages/Comp/DownloadAll.elm
Normal file
@ -0,0 +1,113 @@
{-
   Copyright 2020 Eike K. & Contributors

   SPDX-License-Identifier: AGPL-3.0-or-later
-}


module Messages.Comp.DownloadAll exposing (Texts, de, fr, gb)

import Messages.Data.DownloadFileType
import Util.Size


type alias Texts =
    { downloadFileType : Messages.Data.DownloadFileType.Texts
    , downloadFileTypeLabel : String
    , noResults : String
    , summary : Int -> String -> String
    , close : String
    , downloadPreparing : String
    , downloadTooLarge : String
    , downloadConfigText : Int -> Int -> Int -> String
    , downloadReady : String
    , downloadCreateText : String
    , downloadCreate : String
    , downloadNow : String
    }


byteStr : Int -> String
byteStr n =
    Util.Size.bytesReadable Util.Size.B (toFloat n)


gb : Texts
gb =
    { downloadFileType = Messages.Data.DownloadFileType.gb
    , downloadFileTypeLabel = "What files"
    , noResults = "No results to download."
    , summary = \files -> \size -> "Download consists of " ++ String.fromInt files ++ " files (" ++ size ++ ")."
    , close = "Close"
    , downloadPreparing = "Download is being prepared…"
    , downloadTooLarge = "The download is too large."
    , downloadConfigText =
        \maxNum ->
            \maxSize ->
                \curSize ->
                    "The maximum number of files allowed is "
                        ++ String.fromInt maxNum
                        ++ " and maximum size is "
                        ++ byteStr maxSize
                        ++ " (current size would be "
                        ++ byteStr curSize
                        ++ "). "
    , downloadReady = "Download is ready!"
    , downloadCreateText = "You can create the download at the server. Once it is ready, the button will download the zip file."
    , downloadCreate = "Create download"
    , downloadNow = "Download now!"
    }


de : Texts
de =
    { downloadFileType = Messages.Data.DownloadFileType.de
    , downloadFileTypeLabel = "Welche Dateien"
    , noResults = "Keine Ergebnisse zum Herunterladen."
    , summary = \files -> \size -> "Download besteht aus " ++ String.fromInt files ++ " Dateien (" ++ size ++ ")."
    , close = "Schließen"
    , downloadPreparing = "Der Download wird erstellt…"
    , downloadTooLarge = "Der Download ist zu groß."
    , downloadConfigText =
        \maxNum ->
            \maxSize ->
                \curSize ->
                    "Es können maximal "
                        ++ String.fromInt maxNum
                        ++ " Dateien mit einer Gesamtgröße von "
                        ++ byteStr maxSize
                        ++ " erstellt werden (aktuelle Größe wäre "
                        ++ byteStr curSize
                        ++ "). "
    , downloadReady = "Download ist fertig!"
    , downloadCreateText = "Der Download kann auf dem Server erzeugt werden. Sobald die ZIP Datei fertig ist, kann sie hier heruntergeladen werden."
    , downloadCreate = "Download erstellen"
    , downloadNow = "Jetzt herunterladen"
    }


fr : Texts
fr =
    { downloadFileType = Messages.Data.DownloadFileType.fr
    , downloadFileTypeLabel = "Quels fichiers"
    , noResults = "No results to download"
    , summary = \files -> \size -> "Download consists of " ++ String.fromInt files ++ " files (" ++ size ++ ")."
    , close = "Fermer"
    , downloadPreparing = "Le téléchargement est créé…"
    , downloadTooLarge = "Le téléchargement est trop important."
    , downloadConfigText =
        \maxNum ->
            \maxSize ->
                \curSize ->
                    "Il est possible de créer au maximum "
                        ++ String.fromInt maxNum
                        ++ " fichiers d'une taille totale de "
                        ++ byteStr maxSize
                        ++ " (la taille actuelle serait de "
                        ++ byteStr curSize
                        ++ "). "
    , downloadReady = "Le téléchargement est achevé."
    , downloadCreateText = "Vous pouvez créer le téléchargement sur le serveur. Une fois qu'il est prêt, le bouton téléchargera le fichier zip."
    , downloadCreate = "Créer Télécharger"
    , downloadNow = "Télécharger l'archive!"
    }
@ -0,0 +1,44 @@
{-
   Copyright 2020 Eike K. & Contributors

   SPDX-License-Identifier: AGPL-3.0-or-later
-}


module Messages.Data.DownloadFileType exposing (Texts, de, fr, gb)

import Data.DownloadFileType exposing (DownloadFileType(..))


type alias Texts =
    DownloadFileType -> String


gb : Texts
gb ft =
    case ft of
        Converted ->
            "Converted PDF files"

        Originals ->
            "Original files"


de : Texts
de ft =
    case ft of
        Converted ->
            "Konvertierte PDF Dateien"

        Originals ->
            "Original Dateien"


fr : Texts
fr ft =
    case ft of
        Converted ->
            "Fichiers PDF convertis"

        Originals ->
            "Fichiers originaux"
@ -15,6 +15,7 @@ module Messages.Page.Search exposing
import Data.TimeZone exposing (TimeZone)
import Messages.Basics
import Messages.Comp.BookmarkQueryManage
import Messages.Comp.DownloadAll
import Messages.Comp.ItemCardList
import Messages.Comp.ItemMerge
import Messages.Comp.PublishItems
@ -30,6 +31,7 @@ type alias Texts =
    , itemMerge : Messages.Comp.ItemMerge.Texts
    , publishItems : Messages.Comp.PublishItems.Texts
    , bookmarkManage : Messages.Comp.BookmarkQueryManage.Texts
    , downloadAllComp : Messages.Comp.DownloadAll.Texts
    , contentSearch : String
    , searchInNames : String
    , selectModeTitle : String
@ -76,6 +78,8 @@ type alias Texts =
    , linkItemsSuccessful : String
    , linkItemsInProcess : String
    , linkItemsHeader : String
    , downloadAll : String
    , downloadAllQueryNeeded : String
    }


@ -88,6 +92,7 @@ gb tz =
    , itemMerge = Messages.Comp.ItemMerge.gb tz
    , publishItems = Messages.Comp.PublishItems.gb tz
    , bookmarkManage = Messages.Comp.BookmarkQueryManage.gb
    , downloadAllComp = Messages.Comp.DownloadAll.gb
    , contentSearch = "Content search…"
    , searchInNames = "Search in names…"
    , selectModeTitle = "Select Mode"
@ -134,6 +139,8 @@ gb tz =
    , linkItemsInProcess = "Linking items ..."
    , mergeHeader = "Merge Items"
    , linkItemsHeader = "Link Items"
    , downloadAll = "Download all"
    , downloadAllQueryNeeded = "Apply a criteria to reduce what to download."
    }


@ -146,6 +153,7 @@ de tz =
    , itemMerge = Messages.Comp.ItemMerge.de tz
    , publishItems = Messages.Comp.PublishItems.de tz
    , bookmarkManage = Messages.Comp.BookmarkQueryManage.de
    , downloadAllComp = Messages.Comp.DownloadAll.de
    , contentSearch = "Volltextsuche…"
    , searchInNames = "Suche in Namen…"
    , selectModeTitle = "Auswahlmodus"
@ -192,6 +200,8 @@ de tz =
    , linkItemsInProcess = "Dokumente werden verknüpft ..."
    , mergeHeader = "Dokumente zusammenführen"
    , linkItemsHeader = "Dokument verknüpfen"
    , downloadAll = "Alle herunterladen"
    , downloadAllQueryNeeded = "Alles kann nicht heruntergeladen werden, es muss etwas gesucht werden."
    }


@ -204,6 +214,7 @@ fr tz =
    , itemMerge = Messages.Comp.ItemMerge.fr tz
    , publishItems = Messages.Comp.PublishItems.fr tz
    , bookmarkManage = Messages.Comp.BookmarkQueryManage.fr
    , downloadAllComp = Messages.Comp.DownloadAll.fr
    , contentSearch = "Recherche..."
    , searchInNames = "Recherche par nom..."
    , selectModeTitle = "Select Mode"
@ -250,4 +261,6 @@ fr tz =
    , linkItemsInProcess = "Relier en cours ..."
    , mergeHeader = "Fusionner des documents"
    , linkItemsHeader = "Lier des documents"
    , downloadAll = "Télécharger tout"
    , downloadAllQueryNeeded = "Tout ne peut pas être téléchargé, il faut chercher quelque chose."
    }
@ -10,6 +10,7 @@ module Messages.Page.Share exposing (..)
import Data.TimeZone exposing (TimeZone)
import Http
import Messages.Basics
import Messages.Comp.DownloadAll
import Messages.Comp.HttpError
import Messages.Comp.ItemCardList
import Messages.Comp.SearchMenu
@ -21,6 +22,7 @@ type alias Texts =
    , basics : Messages.Basics.Texts
    , itemCardList : Messages.Comp.ItemCardList.Texts
    , passwordForm : Messages.Comp.SharePasswordForm.Texts
    , downloadAll : Messages.Comp.DownloadAll.Texts
    , httpError : Http.Error -> String
    , authFailed : String
    , fulltextPlaceholder : String
@ -30,6 +32,7 @@ type alias Texts =
    , showItemGroups : String
    , listView : String
    , tileView : String
    , downloadAllLabel : String
    }


@ -39,6 +42,7 @@ gb tz =
    , basics = Messages.Basics.gb
    , itemCardList = Messages.Comp.ItemCardList.gb tz
    , passwordForm = Messages.Comp.SharePasswordForm.gb
    , downloadAll = Messages.Comp.DownloadAll.gb
    , authFailed = "This share does not exist."
    , httpError = Messages.Comp.HttpError.gb
    , fulltextPlaceholder = "Fulltext search…"
@ -48,6 +52,7 @@ gb tz =
    , showItemGroups = "Group by month"
    , listView = "List view"
    , tileView = "Tile view"
    , downloadAllLabel = "Download all"
    }


@ -57,6 +62,7 @@ de tz =
    , basics = Messages.Basics.de
    , itemCardList = Messages.Comp.ItemCardList.de tz
    , passwordForm = Messages.Comp.SharePasswordForm.de
    , downloadAll = Messages.Comp.DownloadAll.de
    , authFailed = "Diese Freigabe existiert nicht."
    , httpError = Messages.Comp.HttpError.de
    , fulltextPlaceholder = "Volltextsuche…"
@ -66,6 +72,7 @@ de tz =
    , showItemGroups = "nach Monat gruppieren"
    , listView = "Listenansicht"
    , tileView = "Kachelansicht"
    , downloadAllLabel = "Alles herunterladen"
    }


@ -75,6 +82,7 @@ fr tz =
    , basics = Messages.Basics.fr
    , itemCardList = Messages.Comp.ItemCardList.fr tz
    , passwordForm = Messages.Comp.SharePasswordForm.fr
    , downloadAll = Messages.Comp.DownloadAll.fr
    , authFailed = "Ce partage n'existe pas."
    , httpError = Messages.Comp.HttpError.fr
    , fulltextPlaceholder = "Recherche en texte entier..."
@ -84,4 +92,5 @@ fr tz =
    , showItemGroups = "Grouper par mois"
    , listView = "Affichage liste"
    , tileView = "Affichage tuile"
    , downloadAllLabel = "Télécharger tout"
    }
@ -17,6 +17,7 @@ module Page exposing
    , isOpen
    , isSearchPage
    , isSecured
    , isSharePage
    , loginPage
    , loginPageReferrer
    , pageFromString
@ -144,6 +145,16 @@ loginPage p =
    LoginPage { emptyLoginData | referrer = Just p }


isSharePage : Page -> Bool
isSharePage page =
    case page of
        SharePage _ ->
            True

        _ ->
            False


isSearchPage : Page -> Bool
isSearchPage page =
    case page of
@ -33,6 +33,7 @@ import Api.Model.ItemLightList exposing (ItemLightList)
import Api.Model.SearchStats exposing (SearchStats)
import Browser.Dom as Dom
import Comp.BookmarkQueryManage
import Comp.DownloadAll
import Comp.ItemCardList
import Comp.ItemDetail.FormChange exposing (FormChange)
import Comp.ItemDetail.MultiEditMenu exposing (SaveNameState(..))
@ -76,6 +77,7 @@ type alias Model =

type TopWidgetModel
    = TopWidgetHidden
    | DownloadAll Comp.DownloadAll.Model
    | BookmarkQuery Comp.BookmarkQueryManage.Model


@ -239,7 +241,9 @@ type Msg
    | ToggleArrange ItemArrange
    | ToggleExpandCollapseRows
    | ToggleBookmarkCurrentQueryView
    | ToggleDownloadAllView
    | BookmarkQueryMsg Comp.BookmarkQueryManage.Msg
    | DownloadAllMsg Comp.DownloadAll.Msg
    | ItemSelectionChanged
@ -13,6 +13,7 @@ module Page.Search.Update exposing
import Api
import Api.Model.ItemLightList exposing (ItemLightList)
import Comp.BookmarkQueryManage
import Comp.DownloadAll
import Comp.ItemCardList
import Comp.ItemDetail.FormChange exposing (FormChange(..))
import Comp.ItemDetail.MultiEditMenu exposing (SaveNameState(..))
@ -892,14 +893,46 @@ update texts bookmarkId lastViewedItemId env msg model =
                Nothing ->
                    resultModelCmd env.selectedItems ( model, Cmd.none )

        ToggleDownloadAllView ->
            case createQuery env.selectedItems model of
                Just q ->
                    case model.topWidgetModel of
                        DownloadAll _ ->
                            resultModelCmd env.selectedItems
                                ( { model
                                    | topWidgetModel = TopWidgetHidden
                                    , viewMenuOpen = False
                                  }
                                , Cmd.none
                                )

                        _ ->
                            let
                                ( qm, qc ) =
                                    Comp.DownloadAll.init Comp.DownloadAll.AccessUser env.flags (Q.render q)
                            in
                            resultModelCmd env.selectedItems
                                ( { model | topWidgetModel = DownloadAll qm, viewMenuOpen = False }
                                , Cmd.map DownloadAllMsg qc
                                )

                Nothing ->
                    resultModelCmd env.selectedItems ( model, Cmd.none )

        ToggleBookmarkCurrentQueryView ->
            case createQuery env.selectedItems model of
                Just q ->
                    case model.topWidgetModel of
                        BookmarkQuery _ ->
                            resultModelCmd env.selectedItems ( { model | topWidgetModel = TopWidgetHidden, viewMenuOpen = False }, Cmd.none )
                            resultModelCmd env.selectedItems
                                ( { model
                                    | topWidgetModel = TopWidgetHidden
                                    , viewMenuOpen = False
                                  }
                                , Cmd.none
                                )

                        TopWidgetHidden ->
                        _ ->
                            let
                                ( qm, qc ) =
                                    Comp.BookmarkQueryManage.init (Q.render q)
@ -947,7 +980,30 @@ update texts bookmarkId lastViewedItemId env msg model =
                        , Sub.map BookmarkQueryMsg res.sub
                        )

                TopWidgetHidden ->
                _ ->
                    resultModelCmd env.selectedItems ( model, Cmd.none )

        DownloadAllMsg lm ->
            case model.topWidgetModel of
                DownloadAll bm ->
                    let
                        res =
                            Comp.DownloadAll.update env.flags lm bm

                        nextModel =
                            if res.closed then
                                TopWidgetHidden

                            else
                                DownloadAll res.model
                    in
                    makeResult env.selectedItems
                        ( { model | topWidgetModel = nextModel }
                        , Cmd.map DownloadAllMsg res.cmd
                        , Sub.none
                        )

                _ ->
                    resultModelCmd env.selectedItems ( model, Cmd.none )

        PublishViewMsg lmsg ->
@ -11,6 +11,7 @@ import Api
import Comp.Basic as B
import Comp.BookmarkQueryManage
import Comp.ConfirmModal
import Comp.DownloadAll
import Comp.ItemCardList
import Comp.ItemMerge
import Comp.MenuBar as MB
@ -108,7 +109,7 @@ mainView texts env model =


bookmarkQueryWidget : Texts -> UiSettings -> Flags -> Model -> List (Html Msg)
bookmarkQueryWidget texts _ _ model =
bookmarkQueryWidget texts _ flags model =
    case model.topWidgetModel of
        BookmarkQuery m ->
            [ div [ class "px-2 mb-4 border-l border-r border-b dark:border-slate-600" ]
@ -116,6 +117,12 @@ bookmarkQueryWidget texts _ _ model =
                ]
            ]

        DownloadAll m ->
            [ div [ class "mb-4 border-l border-r border-b dark:border-slate-600" ]
                [ Html.map DownloadAllMsg (Comp.DownloadAll.view flags texts.downloadAllComp m)
                ]
            ]

        TopWidgetHidden ->
            []

@ -437,6 +444,24 @@ defaultMenuBar texts env model =
                    onClick ToggleBookmarkCurrentQueryView
                ]
          }
        , { label = texts.downloadAll
          , icon = i [ class "fa fa-download" ] []
          , disabled = createQuery env.selectedItems model == Nothing
          , attrs =
                [ title <|
                    if createQuery env.selectedItems model == Nothing then
                        texts.downloadAllQueryNeeded

                    else
                        texts.downloadAll
                , href "#"
                , if createQuery env.selectedItems model == Nothing then
                    class ""

                  else
                    onClick ToggleDownloadAllView
                ]
          }
        , { label =
            if env.settings.cardPreviewFullWidth then
                texts.fullHeightPreviewTitle
@ -5,13 +5,23 @@
-}


module Page.Share.Data exposing (Mode(..), Model, Msg(..), PageError(..), SearchBarMode(..), init, initCmd)
module Page.Share.Data exposing
    ( Mode(..)
    , Model
    , Msg(..)
    , PageError(..)
    , SearchBarMode(..)
    , TopContentModel(..)
    , init
    , initCmd
    )

import Api
import Api.Model.ItemLightList exposing (ItemLightList)
import Api.Model.SearchStats exposing (SearchStats)
import Api.Model.ShareSecret exposing (ShareSecret)
import Api.Model.ShareVerifyResult exposing (ShareVerifyResult)
import Comp.DownloadAll
import Comp.ItemCardList
import Comp.PowerSearchInput
import Comp.SearchMenu
@ -42,6 +52,11 @@ type SearchBarMode
    | SearchBarContent


type TopContentModel
    = TopContentHidden
    | TopContentDownload Comp.DownloadAll.Model


type alias Model =
    { mode : Mode
    , verifyResult : ShareVerifyResult
@ -61,6 +76,7 @@ type alias Model =
        , arrange : ItemArrange
        , rowsOpen : Set String
        }
    , topContent : TopContentModel
    }


@ -84,6 +100,7 @@ emptyModel flags =
        , arrange = Data.ItemArrange.Cards
        , rowsOpen = Set.empty
        }
    , topContent = TopContentHidden
    }


@ -122,3 +139,5 @@ type Msg
    | ToggleViewMenu
    | ToggleArrange ItemArrange
    | ToggleShowGroups
    | DownloadAllMsg Comp.DownloadAll.Msg
    | ToggleDownloadAll
@ -146,6 +146,15 @@ view texts flags model =
                    , onClick (ToggleArrange Data.ItemArrange.Cards)
                    ]
              }
            , { label = texts.downloadAllLabel
              , icon = i [ class "fa fa-download" ] []
              , disabled = False
              , attrs =
                    [ title texts.downloadAllLabel
                    , href "#"
                    , onClick ToggleDownloadAll
                    ]
              }
            ]
      }
    ]
28
modules/webapp/src/main/elm/Page/Share/TopContent.elm
Normal file
@ -0,0 +1,28 @@
{-
   Copyright 2020 Eike K. & Contributors

   SPDX-License-Identifier: AGPL-3.0-or-later
-}


module Page.Share.TopContent exposing (view)

import Comp.DownloadAll
import Data.Flags exposing (Flags)
import Html exposing (Html, div, span, text)
import Html.Attributes exposing (class)
import Messages.Page.Share exposing (Texts)
import Page.Share.Data exposing (Model, Msg(..), TopContentModel(..))


view : Texts -> Flags -> Model -> Html Msg
view texts flags model =
    case model.topContent of
        TopContentHidden ->
            span [ class "hidden" ] []

        TopContentDownload dm ->
            div [ class "mb-4 border-l border-r border-b dark:border-slate-600" ]
                [ Html.map DownloadAllMsg
                    (Comp.DownloadAll.view flags texts.downloadAll dm)
                ]
@ -8,6 +8,7 @@
module Page.Share.Update exposing (UpdateResult, update)

import Api
import Comp.DownloadAll
import Comp.ItemCardList
import Comp.LinkTarget exposing (LinkTarget)
import Comp.PowerSearchInput
@ -19,7 +20,10 @@ import Data.ItemQuery as Q
import Data.SearchMode
import Data.UiSettings exposing (UiSettings)
import Page.Share.Data exposing (..)
import Process
import Set
import Task
import Time
import Util.Html
import Util.Maybe
import Util.Update
@ -252,30 +256,97 @@ update flags settings shareId msg model =
        UiSettingsResp (Err _) ->
            noSub ( model, Cmd.none )

        DownloadAllMsg lm ->
            case model.topContent of
                TopContentDownload dm ->
                    let
                        res =
                            Comp.DownloadAll.update flags lm dm

                        nextModel =
                            if res.closed then
                                TopContentHidden

                            else
                                TopContentDownload res.model

                        -- The share page can't use websockets (not authenticated) so need to poll
                        -- for new download state
                        checkSub =
                            if Comp.DownloadAll.isPreparing res.model then
                                Process.sleep 3500
                                    |> Task.perform (always (DownloadAllMsg Comp.DownloadAll.checkDownload))

                            else
                                Cmd.none
                    in
                    { model = { model | topContent = nextModel }
                    , cmd =
                        Cmd.batch
                            [ Cmd.map DownloadAllMsg res.cmd
                            , checkSub
                            ]
                    , sub = Sub.none
                    }

                _ ->
                    noSub ( model, Cmd.none )

        ToggleDownloadAll ->
            let
                vm =
                    model.viewMode

                nextVm =
                    { vm | menuOpen = False }
            in
            case model.topContent of
                TopContentHidden ->
                    let
                        query =
                            createQuery flags model
                                |> Maybe.withDefault (Q.DateMs Q.Gt 0)

                        am =
                            Comp.DownloadAll.AccessShare shareId

                        ( dm, dc ) =
                            Comp.DownloadAll.init am flags (Q.render query)
                    in
                    noSub ( { model | topContent = TopContentDownload dm, viewMode = nextVm }, Cmd.map DownloadAllMsg dc )

                TopContentDownload _ ->
                    noSub ( { model | topContent = TopContentHidden, viewMode = nextVm }, Cmd.none )


noSub : ( Model, Cmd Msg ) -> UpdateResult
noSub ( m, c ) =
    UpdateResult m c Sub.none


createQuery : Flags -> Model -> Maybe Q.ItemQuery
createQuery flags model =
    Q.and
        [ Comp.SearchMenu.getItemQuery Data.ItemIds.empty model.searchMenuModel
        , Maybe.map Q.Fragment <|
            case model.searchMode of
                SearchBarNormal ->
                    Comp.PowerSearchInput.getSearchString model.powerSearchInput

                SearchBarContent ->
                    if flags.config.fullTextSearchEnabled then
                        Maybe.map (Q.Contents >> Q.render) model.contentSearch

                    else
                        Maybe.map (Q.AllNames >> Q.render) model.contentSearch
        ]


makeSearchCmd : Flags -> Bool -> Model -> Cmd Msg
makeSearchCmd flags doInit model =
    let
        xq =
            Q.and
                [ Comp.SearchMenu.getItemQuery Data.ItemIds.empty model.searchMenuModel
                , Maybe.map Q.Fragment <|
                    case model.searchMode of
                        SearchBarNormal ->
                            Comp.PowerSearchInput.getSearchString model.powerSearchInput

                        SearchBarContent ->
                            if flags.config.fullTextSearchEnabled then
                                Maybe.map (Q.Contents >> Q.render) model.contentSearch

                            else
                                Maybe.map (Q.AllNames >> Q.render) model.contentSearch
                ]
            createQuery flags model

        request mq =
            { offset = Nothing
@ -19,6 +19,7 @@ import Page.Share.Data exposing (..)
import Page.Share.Menubar as Menubar
import Page.Share.Results as Results
import Page.Share.Sidebar as Sidebar
import Page.Share.TopContent as TopContent
import Styles as S


@ -80,6 +81,7 @@ mainContent texts flags shareId model =
            ]
        , Menubar.view texts flags model
        , errorMessage texts model
        , TopContent.view texts flags model
        , Results.view texts model.uiSettings flags shareId model
        ]
@ -395,7 +395,7 @@ editLinkTableCellStyle =

dimmer : String
dimmer =
    " absolute top-0 left-0 w-full h-full bg-black bg-opacity-90 dark:bg-slate-900 dark:bg-opacity-90 z-50 flex flex-col items-center justify-center px-4 md:px-8 py-2 "
    " absolute top-0 left-0 w-full h-full bg-black bg-opacity-90 dark:bg-slate-900 dark:bg-opacity-90 z-30 flex flex-col items-center justify-center px-4 md:px-8 py-2 "


dimmerLight : String