Add a file-repository for better organizing files
Docspell must now use a new API for accessing files. Issue: #1379
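In short: the bare Ident-keyed FileStore becomes a FileRepository keyed by a structured FileKey (collective + category + id), so every save must now state who owns a file and what it is used for. A minimal before/after sketch condensed from the hunks below; the values store, fileData and collective are assumed to be in scope and are illustrative:

    import docspell.common._

    // before: files were addressed by a bare Ident, with no ownership info
    // fileData.through(store.fileStore.save(MimeTypeHint.none))   // Stream[F, Ident]

    // after: the owning collective and a FileCategory are required,
    // and the result is a structured FileKey
    val stored =
      fileData.through(
        store.fileRepo.save(collective, FileCategory.AttachmentSource, MimeTypeHint.none)
      )                                                            // Stream[F, FileKey]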
@@ -14,6 +14,7 @@ import fs2.Stream
 import docspell.backend.ops.OItemSearch._
 import docspell.common._
 import docspell.store._
+import docspell.store.file.FileMetadata
 import docspell.store.queries.{QAttachment, QItem}
 import docspell.store.records._
 
@@ -89,18 +90,21 @@ object OItemSearch {
   trait BinaryData[F[_]] {
     def data: Stream[F, Byte]
     def name: Option[String]
-    def meta: RFileMeta
-    def fileId: Ident
+    def meta: FileMetadata
+    def fileId: FileKey
   }
-  case class AttachmentData[F[_]](ra: RAttachment, meta: RFileMeta, data: Stream[F, Byte])
-      extends BinaryData[F] {
+  case class AttachmentData[F[_]](
+      ra: RAttachment,
+      meta: FileMetadata,
+      data: Stream[F, Byte]
+  ) extends BinaryData[F] {
     val name = ra.name
     val fileId = ra.fileId
   }
 
   case class AttachmentSourceData[F[_]](
       rs: RAttachmentSource,
-      meta: RFileMeta,
+      meta: FileMetadata,
       data: Stream[F, Byte]
   ) extends BinaryData[F] {
     val name = rs.name
@@ -109,7 +113,7 @@ object OItemSearch {
 
   case class AttachmentPreviewData[F[_]](
       rs: RAttachmentPreview,
-      meta: RFileMeta,
+      meta: FileMetadata,
       data: Stream[F, Byte]
   ) extends BinaryData[F] {
     val name = rs.name
@@ -118,7 +122,7 @@ object OItemSearch {
 
   case class AttachmentArchiveData[F[_]](
       rs: RAttachmentArchive,
-      meta: RFileMeta,
+      meta: FileMetadata,
       data: Stream[F, Byte]
   ) extends BinaryData[F] {
     val name = rs.name
@@ -188,7 +192,7 @@ object OItemSearch {
         AttachmentData[F](
           ra,
           m,
-          store.fileStore.getBytes(m.id)
+          store.fileRepo.getBytes(m.id)
         )
       }
 
@@ -208,7 +212,7 @@ object OItemSearch {
         AttachmentSourceData[F](
           ra,
           m,
-          store.fileStore.getBytes(m.id)
+          store.fileRepo.getBytes(m.id)
         )
       }
 
@@ -228,7 +232,7 @@ object OItemSearch {
         AttachmentPreviewData[F](
           ra,
           m,
-          store.fileStore.getBytes(m.id)
+          store.fileRepo.getBytes(m.id)
         )
       }
 
@@ -248,7 +252,7 @@ object OItemSearch {
         AttachmentPreviewData[F](
           ra,
           m,
-          store.fileStore.getBytes(m.id)
+          store.fileRepo.getBytes(m.id)
         )
       }
 
@@ -268,7 +272,7 @@ object OItemSearch {
         AttachmentArchiveData[F](
           ra,
           m,
-          store.fileStore.getBytes(m.id)
+          store.fileRepo.getBytes(m.id)
         )
       }
 
@@ -276,9 +280,11 @@ object OItemSearch {
           (None: Option[AttachmentArchiveData[F]]).pure[F]
       }
 
-    private def makeBinaryData[A](fileId: Ident)(f: RFileMeta => A): F[Option[A]] =
-      store.fileStore
-        .findMeta(fileId)
+    private def makeBinaryData[A](fileId: FileKey)(f: FileMetadata => A): F[Option[A]] =
+      OptionT(
+        store.fileRepo
+          .findMeta(fileId)
+      )
         .map(fm => f(fm))
         .value
 
@@ -249,7 +249,7 @@ object OMail {
       } yield {
         val addAttach = m.attach.filter(ras).map { a =>
           Attach[F](
-            store.fileStore.getBytes(a._2.id)
+            store.fileRepo.getBytes(a._2.id)
           ).withFilename(a._1.name)
             .withLength(a._2.length.bytes)
             .withMimeType(a._2.mimetype.toEmil)
@@ -126,7 +126,7 @@ object OUpload {
     ): F[OUpload.UploadResult] =
       (for {
         _ <- checkExistingItem(itemId, account.collective)
-        files <- right(data.files.traverse(saveFile).map(_.flatten))
+        files <- right(data.files.traverse(saveFile(account)).map(_.flatten))
         _ <- checkFileList(files)
         lang <- data.meta.language match {
           case Some(lang) => right(lang.pure[F])
@@ -200,10 +200,18 @@ object OUpload {
       } yield UploadResult.Success
 
     /** Saves the file into the database. */
-    private def saveFile(file: File[F]): F[Option[ProcessItemArgs.File]] =
+    private def saveFile(
+        accountId: AccountId
+    )(file: File[F]): F[Option[ProcessItemArgs.File]] =
       logger.finfo(s"Receiving file $file") *>
         file.data
-          .through(store.fileStore.save(MimeTypeHint(file.name, None)))
+          .through(
+            store.fileRepo.save(
+              accountId.collective,
+              FileCategory.AttachmentSource,
+              MimeTypeHint(file.name, None)
+            )
+          )
          .compile
          .lastOrError
          .attempt
@@ -0,0 +1,46 @@
+/*
+ * Copyright 2020 Eike K. & Contributors
+ *
+ * SPDX-License-Identifier: AGPL-3.0-or-later
+ */
+
+package docspell.common
+
+import cats.data.NonEmptyList
+
+import docspell.common
+
+import io.circe.{Decoder, Encoder}
+
+/** This is used to have a rough idea for what a file is used in the system. It is part of
+  * the file-key to identify a file, backends could ignore it, since the file-id (the last
+  * part of the file-key) should be globally unique anyways.
+  */
+sealed trait FileCategory { self: Product =>
+  final def id: Ident =
+    Ident.unsafe(self.productPrefix.toLowerCase)
+
+  def toFileKey(collective: Ident, fileId: Ident): FileKey =
+    common.FileKey(collective, this, fileId)
+}
+
+object FileCategory {
+  // Impl note: Changing constants here requires a database migration!
+
+  case object AttachmentSource extends FileCategory
+  case object AttachmentConvert extends FileCategory
+  case object PreviewImage extends FileCategory
+  case object Classifier extends FileCategory
+
+  val all: NonEmptyList[FileCategory] =
+    NonEmptyList.of(AttachmentSource, AttachmentConvert, PreviewImage, Classifier)
+
+  def fromString(str: String): Either[String, FileCategory] =
+    all.find(_.id.id == str).toRight(s"Unknown category: $str")
+
+  implicit val jsonDecoder: Decoder[FileCategory] =
+    Decoder[String].emap(fromString)
+
+  implicit val jsonEncoder: Encoder[FileCategory] =
+    Encoder[String].contramap(_.id.id)
+}
modules/common/src/main/scala/docspell/common/FileKey.scala (new file, 21 lines)
@@ -0,0 +1,21 @@
+/*
+ * Copyright 2020 Eike K. & Contributors
+ *
+ * SPDX-License-Identifier: AGPL-3.0-or-later
+ */
+
+package docspell.common
+
+import io.circe.generic.semiauto.{deriveDecoder, deriveEncoder}
+import io.circe.{Decoder, Encoder}
+
+case class FileKey(collective: Ident, category: FileCategory, id: Ident)
+
+object FileKey {
+
+  implicit val jsonDecoder: Decoder[FileKey] =
+    deriveDecoder[FileKey]
+
+  implicit val jsonEncoder: Encoder[FileKey] =
+    deriveEncoder[FileKey]
+}
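A FileKey carries the owning collective, a FileCategory and the file id, and both types ship circe codecs. A small sketch of building one via the toFileKey helper from FileCategory and encoding it; the identifier values are made up:

    import docspell.common._
    import io.circe.syntax._

    val key: FileKey =
      FileCategory.AttachmentSource.toFileKey(Ident.unsafe("coll1"), Ident.unsafe("abc123"))

    // roughly: {"collective":"coll1","category":"attachmentsource","id":"abc123"}
    val json: String = key.asJson.noSpaces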
@@ -25,13 +25,15 @@ case class Ident(id: String) {
     !isEmpty
 
   def /(next: Ident): Ident =
-    new Ident(id + "." + next.id)
+    new Ident(id + Ident.concatChar + next.id)
 }
 
 object Ident {
+  private val concatChar = '.'
   implicit val identEq: Eq[Ident] =
     Eq.by(_.id)
 
+  // Note, the slash *must not* be part of valid characters
   val chars: Set[Char] = (('A' to 'Z') ++ ('a' to 'z') ++ ('0' to '9') ++ "-_.@").toSet
 
   def randomUUID[F[_]: Sync]: F[Ident] =
@@ -75,5 +77,4 @@ object Ident {
 
   implicit val order: Order[Ident] =
     Order.by(_.id)
-
 }
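The / operator keeps its behavior; it now just concatenates with the named constant instead of a string literal:

    import docspell.common._

    // still joins with a dot: Ident("a.b")
    val combined: Ident = Ident.unsafe("a") / Ident.unsafe("b")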
@@ -60,7 +60,7 @@ object ProcessItemArgs {
     implicit val jsonDecoder: Decoder[ProcessMeta] = deriveDecoder[ProcessMeta]
   }
 
-  case class File(name: Option[String], fileMetaId: Ident)
+  case class File(name: Option[String], fileMetaId: FileKey)
   object File {
     implicit val jsonEncoder: Encoder[File] = deriveEncoder[File]
     implicit val jsonDecoder: Decoder[File] = deriveDecoder[File]
@@ -31,7 +31,7 @@ object Classify {
       _ <- OptionT.liftF(logger.info(s"Guessing label for ${cname.name} …"))
       model <- OptionT(store.transact(RClassifierModel.findByName(coll, cname.name)))
         .flatTapNone(logger.debug("No classifier model found."))
-      modelData = store.fileStore.getBytes(model.fileId)
+      modelData = store.fileRepo.getBytes(model.fileId)
       cls <- OptionT(File.withTempDir(workingDir, "classify").use { dir =>
         val modelFile = dir.resolve("model.ser.gz")
         modelData
@@ -91,7 +91,7 @@ object LearnClassifierTask {
           n <- ctx.store.transact(RClassifierModel.deleteAll(list.map(_.id)))
           _ <- list
             .map(_.fileId)
-            .traverse(id => ctx.store.fileStore.delete(id))
+            .traverse(id => ctx.store.fileRepo.delete(id))
           _ <- ctx.logger.debug(s"Deleted $n model files.")
         } yield ()
 
@@ -42,7 +42,12 @@ object StoreClassifierModel {
       _ <- logger.debug(s"Storing new trained model for: ${modelName.name}")
       fileData = Files[F].readAll(trainedModel.model)
       newFileId <-
-        fileData.through(store.fileStore.save(MimeTypeHint.none)).compile.lastOrError
+        fileData
+          .through(
+            store.fileRepo.save(collective, FileCategory.Classifier, MimeTypeHint.none)
+          )
+          .compile
+          .lastOrError
       _ <- store.transact(
         RClassifierModel.updateFile(collective, modelName.name, newFileId)
       )
@@ -50,7 +55,7 @@ object StoreClassifierModel {
       _ <- oldFile match {
         case Some(fid) =>
           logger.debug(s"Deleting old model file ${fid.id}") *>
-            store.fileStore.delete(fid)
+            store.fileRepo.delete(fid)
         case None => ().pure[F]
       }
     } yield ()
@@ -92,7 +92,7 @@ object PdfConvTask {
       ctx: Context[F, Args],
       in: RFileMeta
   ): F[Unit] = {
-    val fs = ctx.store.fileStore
+    val fs = ctx.store.fileRepo
     val data = fs.getBytes(in.id)
 
     val storeResult: ConversionResult.Handler[F, Unit] =
@@ -141,11 +141,15 @@ object PdfConvTask {
       newFile: Stream[F, Byte]
   ): F[Unit] = {
     val mimeHint = MimeTypeHint.advertised(meta.mimetype)
+    val collective = meta.id.collective
+    val cat = FileCategory.AttachmentConvert
     for {
       fid <-
-        newFile.through(ctx.store.fileStore.save(mimeHint)).compile.lastOrError
+        newFile
+          .through(ctx.store.fileRepo.save(collective, cat, mimeHint))
+          .compile
+          .lastOrError
       _ <- ctx.store.transact(RAttachment.updateFileId(ctx.args.attachId, fid))
     } yield ()
   }
 
 }
@@ -100,5 +100,5 @@ object AttachmentPageCount {
       .getOrElse(MimeType.octetStream)
 
   def loadFile[F[_]](ctx: Context[F, _])(ra: RAttachment): Stream[F, Byte] =
-    ctx.store.fileStore.getBytes(ra.fileId)
+    ctx.store.fileRepo.getBytes(ra.fileId)
 }
@@ -59,7 +59,7 @@ object AttachmentPreview {
       preview.previewPNG(loadFile(ctx)(ra)).flatMap {
         case Some(out) =>
           ctx.logger.debug("Preview generated, saving to database…") *>
-            createRecord(ctx, out, ra).map(_.some)
+            createRecord(ctx, ra.fileId.collective, out, ra).map(_.some)
         case None =>
           ctx.logger
             .info(s"Preview could not be generated. Maybe the pdf has no pages?") *>
@@ -73,6 +73,7 @@ object AttachmentPreview {
 
   private def createRecord[F[_]: Sync](
       ctx: Context[F, _],
+      collective: Ident,
       png: Stream[F, Byte],
       ra: RAttachment
   ): F[RAttachmentPreview] = {
@@ -82,7 +83,11 @@ object AttachmentPreview {
     for {
       fileId <- png
        .through(
-          ctx.store.fileStore.save(MimeTypeHint(name.map(_.fullName), Some("image/png")))
+          ctx.store.fileRepo.save(
+            collective,
+            FileCategory.PreviewImage,
+            MimeTypeHint(name.map(_.fullName), Some("image/png"))
+          )
        )
        .compile
        .lastOrError
@@ -99,5 +104,5 @@ object AttachmentPreview {
       .getOrElse(MimeType.octetStream)
 
   def loadFile[F[_]](ctx: Context[F, _])(ra: RAttachment): Stream[F, Byte] =
-    ctx.store.fileStore.getBytes(ra.fileId)
+    ctx.store.fileRepo.getBytes(ra.fileId)
 }
@@ -32,11 +32,12 @@ import docspell.store.records._
  * This step assumes an existing premature item, it traverses its attachments.
  */
 object ConvertPdf {
+  type Args = ProcessItemArgs
 
   def apply[F[_]: Async](
       cfg: ConvertConfig,
       item: ItemData
-  ): Task[F, ProcessItemArgs, ItemData] =
+  ): Task[F, Args, ItemData] =
     Task { ctx =>
       def convert(ra: RAttachment): F[(RAttachment, Option[RAttachmentMeta])] =
         isConverted(ctx)(ra).flatMap {
@@ -61,7 +62,7 @@ object ConvertPdf {
 
     }
 
-  def isConverted[F[_]](ctx: Context[F, ProcessItemArgs])(
+  def isConverted[F[_]](ctx: Context[F, Args])(
       ra: RAttachment
   ): F[Boolean] =
     ctx.store.transact(RAttachmentSource.isConverted(ra.id))
@@ -74,14 +75,14 @@ object ConvertPdf {
   def convertSafe[F[_]: Async](
       cfg: ConvertConfig,
       sanitizeHtml: SanitizeHtml,
-      ctx: Context[F, ProcessItemArgs],
+      ctx: Context[F, Args],
       item: ItemData
   )(ra: RAttachment, mime: MimeType): F[(RAttachment, Option[RAttachmentMeta])] =
     loadCollectivePasswords(ctx).flatMap(collPass =>
       Conversion.create[F](cfg, sanitizeHtml, collPass, ctx.logger).use { conv =>
         mime match {
           case mt =>
-            val data = ctx.store.fileStore.getBytes(ra.fileId)
+            val data = ctx.store.fileRepo.getBytes(ra.fileId)
             val handler = conversionHandler[F](ctx, cfg, ra, item)
             ctx.logger
               .info(s"Converting file ${ra.name} (${mime.asString}) into a PDF") *>
@@ -93,14 +94,14 @@ object ConvertPdf {
     )
 
   private def loadCollectivePasswords[F[_]: Async](
-      ctx: Context[F, ProcessItemArgs]
+      ctx: Context[F, Args]
   ): F[List[Password]] =
     ctx.store
       .transact(RCollectivePassword.findAll(ctx.args.meta.collective))
       .map(_.map(_.password).distinct)
 
   private def conversionHandler[F[_]: Sync](
-      ctx: Context[F, ProcessItemArgs],
+      ctx: Context[F, Args],
       cfg: ConvertConfig,
       ra: RAttachment,
       item: ItemData
@@ -146,7 +147,7 @@ object ConvertPdf {
   }
 
   private def storePDF[F[_]: Sync](
-      ctx: Context[F, ProcessItemArgs],
+      ctx: Context[F, Args],
       cfg: ConvertConfig,
       ra: RAttachment,
       pdf: Stream[F, Byte]
@@ -160,7 +161,13 @@ object ConvertPdf {
       .map(_.fullName)
 
     pdf
-      .through(ctx.store.fileStore.save(MimeTypeHint(hint.filename, hint.advertised)))
+      .through(
+        ctx.store.fileRepo.save(
+          ctx.args.meta.collective,
+          FileCategory.AttachmentConvert,
+          MimeTypeHint(hint.filename, hint.advertised)
+        )
+      )
       .compile
       .lastOrError
       .flatMap(fmId => updateAttachment[F](ctx, ra, fmId, newName).map(_ => fmId))
@@ -170,7 +177,7 @@ object ConvertPdf {
   private def updateAttachment[F[_]: Sync](
       ctx: Context[F, _],
       ra: RAttachment,
-      fmId: Ident,
+      fmId: FileKey,
       newName: Option[String]
   ): F[Unit] =
     for {
@@ -188,7 +195,7 @@ object ConvertPdf {
           if (sameFile) ().pure[F]
          else
            ctx.logger.info("Deleting previous attachment file") *>
-              ctx.store.fileStore
+              ctx.store.fileRepo
                .delete(raPrev.fileId)
                .attempt
                .flatMap {
@@ -14,6 +14,7 @@ import fs2.Stream
 
 import docspell.common._
 import docspell.joex.scheduler.{Context, Task}
+import docspell.store.file.FileMetadata
 import docspell.store.queries.QItem
 import docspell.store.records._
 
@@ -28,7 +29,7 @@ object CreateItem {
 
   def createNew[F[_]: Sync]: Task[F, ProcessItemArgs, ItemData] =
     Task { ctx =>
-      def isValidFile(fm: RFileMeta) =
+      def isValidFile(fm: FileMetadata) =
         ctx.args.meta.validFileTypes.isEmpty ||
           ctx.args.meta.validFileTypes.toSet
             .contains(fm.mimetype)
@@ -39,9 +40,7 @@ object CreateItem {
         .flatMap { offset =>
           Stream
             .emits(ctx.args.files)
-            .evalMap(f =>
-              ctx.store.fileStore.findMeta(f.fileMetaId).value.map(fm => (f, fm))
-            )
+            .evalMap(f => ctx.store.fileRepo.findMeta(f.fileMetaId).map(fm => (f, fm)))
            .collect { case (f, Some(fm)) if isValidFile(fm) => f }
            .zipWithIndex
            .evalMap { case (f, index) =>
@@ -198,6 +197,6 @@ object CreateItem {
   // TODO if no source is present, it must be saved!
   private def originFileTuple(
       t: (RAttachment, Option[RAttachmentSource])
-  ): (Ident, Ident) =
+  ): (Ident, FileKey) =
     t._2.map(s => s.id -> s.fileId).getOrElse(t._1.id -> t._1.fileId)
 }
@@ -51,7 +51,7 @@ object DuplicateCheck {
       val fname = ctx.args.files.find(_.fileMetaId == fd.fm.id).flatMap(_.name)
       if (fd.exists)
         ctx.logger
-          .info(s"Deleting duplicate file $fname!") *> ctx.store.fileStore
+          .info(s"Deleting duplicate file $fname!") *> ctx.store.fileRepo
           .delete(fd.fm.id)
       else ().pure[F]
     }
@@ -32,16 +32,17 @@ import emil.Mail
  * This step assumes an existing premature item, it traverses its attachments.
  */
 object ExtractArchive {
+  type Args = ProcessItemArgs
 
   def apply[F[_]: Async](
       item: ItemData
-  ): Task[F, ProcessItemArgs, ItemData] =
+  ): Task[F, Args, ItemData] =
     multiPass(item, None).map(_._2)
 
   def multiPass[F[_]: Async](
       item: ItemData,
       archive: Option[RAttachmentArchive]
-  ): Task[F, ProcessItemArgs, (Option[RAttachmentArchive], ItemData)] =
+  ): Task[F, Args, (Option[RAttachmentArchive], ItemData)] =
     singlePass(item, archive).flatMap { t =>
       if (t._1.isEmpty) Task.pure(t)
       else multiPass(t._2, t._1)
@@ -50,7 +51,7 @@ object ExtractArchive {
   def singlePass[F[_]: Async](
       item: ItemData,
       archive: Option[RAttachmentArchive]
-  ): Task[F, ProcessItemArgs, (Option[RAttachmentArchive], ItemData)] =
+  ): Task[F, Args, (Option[RAttachmentArchive], ItemData)] =
     Task { ctx =>
       def extract(ra: RAttachment, pos: Int): F[Extracted] =
         findMime(ctx)(ra).flatMap(m => extractSafe(ctx, archive)(ra, pos, m))
@@ -88,7 +89,7 @@ object ExtractArchive {
       .getOrElse(MimeType.octetStream)
 
   def extractSafe[F[_]: Async](
-      ctx: Context[F, ProcessItemArgs],
+      ctx: Context[F, Args],
       archive: Option[RAttachmentArchive]
   )(ra: RAttachment, pos: Int, mime: MimeType): F[Extracted] =
     mime match {
@@ -120,7 +121,7 @@ object ExtractArchive {
           )
           _ <- ctx.store.transact(RAttachmentArchive.delete(ra.id))
          _ <- ctx.store.transact(RAttachment.delete(ra.id))
-          _ <- ctx.store.fileStore.delete(ra.fileId)
+          _ <- ctx.store.fileRepo.delete(ra.fileId)
         } yield extracted
       case None =>
         for {
@@ -132,10 +133,10 @@ object ExtractArchive {
     }
 
   def extractZip[F[_]: Async](
-      ctx: Context[F, ProcessItemArgs],
+      ctx: Context[F, Args],
       archive: Option[RAttachmentArchive]
   )(ra: RAttachment, pos: Int): F[Extracted] = {
-    val zipData = ctx.store.fileStore.getBytes(ra.fileId)
+    val zipData = ctx.store.fileRepo.getBytes(ra.fileId)
     val glob = ctx.args.meta.fileFilter.getOrElse(Glob.all)
     ctx.logger.debug(s"Filtering zip entries with '${glob.asString}'") *>
       zipData
@@ -148,10 +149,10 @@ object ExtractArchive {
   }
 
   def extractMail[F[_]: Async](
-      ctx: Context[F, ProcessItemArgs],
+      ctx: Context[F, Args],
       archive: Option[RAttachmentArchive]
   )(ra: RAttachment, pos: Int): F[Extracted] = {
-    val email: Stream[F, Byte] = ctx.store.fileStore.getBytes(ra.fileId)
+    val email: Stream[F, Byte] = ctx.store.fileRepo.getBytes(ra.fileId)
 
     val glob = ctx.args.meta.fileFilter.getOrElse(Glob.all)
     val attachOnly = ctx.args.meta.attachmentsOnly.getOrElse(false)
@@ -183,7 +184,7 @@ object ExtractArchive {
       .getOrElse(Extracted.empty)
 
   def handleEntry[F[_]: Sync](
-      ctx: Context[F, _],
+      ctx: Context[F, Args],
       ra: RAttachment,
       pos: Int,
       archive: Option[RAttachmentArchive],
@@ -193,7 +194,10 @@ object ExtractArchive {
   ): Stream[F, Extracted] = {
     val (entry, subPos) = tentry
     val mimeHint = MimeTypeHint.filename(entry.name).withAdvertised(entry.mime.asString)
-    val fileId = entry.data.through(ctx.store.fileStore.save(mimeHint))
+    val fileId = entry.data.through(
+      ctx.store.fileRepo
+        .save(ctx.args.meta.collective, FileCategory.AttachmentSource, mimeHint)
+    )
 
     Stream.eval(ctx.logger.debug(s"Extracted ${entry.name}. Storing as attachment.")) >>
       fileId.evalMap { fid =>
@@ -38,7 +38,7 @@ case class ItemData(
     attachments: Vector[RAttachment],
     metas: Vector[RAttachmentMeta],
     dateLabels: Vector[AttachmentDates],
-    originFile: Map[Ident, Ident], // maps RAttachment.id -> FileMeta.id
+    originFile: Map[Ident, FileKey], // maps RAttachment.id -> FileMeta.id
     givenMeta: MetaProposalList, // given meta data not associated to a specific attachment
     // a list of tags (names or ids) attached to the item if they exist
     tags: List[String],
@@ -133,7 +133,7 @@ object ItemHandler {
       ctx.logger.info("Deleting input files …") *>
         Stream
           .emits(ctx.args.files.map(_.fileMetaId))
-          .evalMap(id => ctx.store.fileStore.delete(id).attempt)
+          .evalMap(id => ctx.store.fileRepo.delete(id).attempt)
          .compile
          .drain
     )
@@ -126,11 +126,11 @@ object TextExtraction {
       ctx: Context[F, _],
       extr: Extraction[F],
       lang: Language
-  )(fileId: Ident): F[ExtractResult] = {
-    val data = ctx.store.fileStore.getBytes(fileId)
+  )(fileId: FileKey): F[ExtractResult] = {
+    val data = ctx.store.fileRepo.getBytes(fileId)
 
     def findMime: F[MimeType] =
-      OptionT(ctx.store.transact(RFileMeta.findById(fileId)))
+      OptionT(ctx.store.fileRepo.findMeta(fileId))
         .map(_.mimetype)
         .getOrElse(MimeType.octetStream)
 
@@ -143,7 +143,7 @@ object TextExtraction {
       cfg: ExtractConfig,
       ra: RAttachment,
       lang: Language
-  )(fileIds: List[Ident]): F[Option[ExtractResult.Success]] =
+  )(fileIds: List[FileKey]): F[Option[ExtractResult.Success]] =
     fileIds match {
       case Nil =>
        ctx.logger.error(s"Cannot extract text").map(_ => None)
@@ -179,7 +179,7 @@ object TextExtraction {
   private def filesToExtract[F[_]: Sync](ctx: Context[F, _])(
       item: ItemData,
       ra: RAttachment
-  ): F[List[Ident]] =
+  ): F[List[FileKey]] =
     item.originFile.get(ra.id) match {
       case Some(sid) =>
        ctx.store.transact(RFileMeta.findMime(sid)).map {
@@ -15,7 +15,7 @@ import docspell.backend.ops.OItemSearch.{AttachmentData, AttachmentPreviewData}
 import docspell.backend.ops._
 import docspell.restapi.model.BasicResult
 import docspell.restserver.http4s.{QueryParam => QP}
-import docspell.store.records.RFileMeta
+import docspell.store.file.FileMetadata
 
 import org.http4s._
 import org.http4s.circe.CirceEntityEncoder._
@@ -117,7 +117,7 @@ object BinaryUtil {
   }
 
   def matchETag[F[_]](
-      fileData: Option[RFileMeta],
+      fileData: Option[FileMetadata],
       noneMatch: Option[NonEmptyList[EntityTag]]
   ): Boolean =
     (fileData, noneMatch) match {
@ -12,7 +12,7 @@ import cats.effect._
|
|||||||
import cats.~>
|
import cats.~>
|
||||||
import fs2._
|
import fs2._
|
||||||
|
|
||||||
import docspell.store.file.FileStore
|
import docspell.store.file.FileRepository
|
||||||
import docspell.store.impl.StoreImpl
|
import docspell.store.impl.StoreImpl
|
||||||
|
|
||||||
import com.zaxxer.hikari.HikariDataSource
|
import com.zaxxer.hikari.HikariDataSource
|
||||||
@ -26,7 +26,7 @@ trait Store[F[_]] {
|
|||||||
|
|
||||||
def transact[A](prg: Stream[ConnectionIO, A]): Stream[F, A]
|
def transact[A](prg: Stream[ConnectionIO, A]): Stream[F, A]
|
||||||
|
|
||||||
def fileStore: FileStore[F]
|
def fileRepo: FileRepository[F]
|
||||||
|
|
||||||
def add(insert: ConnectionIO[Int], exists: ConnectionIO[Boolean]): F[AddResult]
|
def add(insert: ConnectionIO[Int], exists: ConnectionIO[Boolean]): F[AddResult]
|
||||||
}
|
}
|
||||||
@ -50,8 +50,8 @@ object Store {
|
|||||||
ds.setDriverClassName(jdbc.driverClass)
|
ds.setDriverClassName(jdbc.driverClass)
|
||||||
}
|
}
|
||||||
xa = HikariTransactor(ds, connectEC)
|
xa = HikariTransactor(ds, connectEC)
|
||||||
fs = FileStore[F](xa, ds, chunkSize)
|
fr = FileRepository.genericJDBC(xa, ds, chunkSize)
|
||||||
st = new StoreImpl[F](fs, jdbc, xa)
|
st = new StoreImpl[F](fr, jdbc, xa)
|
||||||
_ <- Resource.eval(st.migrate)
|
_ <- Resource.eval(st.migrate)
|
||||||
} yield st
|
} yield st
|
||||||
}
|
}
|
||||||
|
@@ -23,8 +23,9 @@ final private[file] class AttributeStore[F[_]: Sync](xa: Transactor[F])
     for {
       now <- Timestamp.current[F]
       a <- attrs
+      fileKey <- makeFileKey(id)
       fm = RFileMeta(
-        Ident.unsafe(id.id),
+        fileKey,
         now,
         MimeType.parse(a.contentType.contentType).getOrElse(MimeType.octetStream),
         ByteSize(a.length),
@@ -34,7 +35,7 @@ final private[file] class AttributeStore[F[_]: Sync](xa: Transactor[F])
     } yield ()
 
   def deleteAttr(id: BinaryId): F[Boolean] =
-    RFileMeta.delete(Ident.unsafe(id.id)).transact(xa).map(_ > 0)
+    makeFileKey(id).flatMap(fileKey => RFileMeta.delete(fileKey).transact(xa).map(_ > 0))
 
   def findAttr(id: BinaryId): OptionT[F, BinaryAttributes] =
     findMeta(id).map(fm =>
@@ -46,5 +47,10 @@ final private[file] class AttributeStore[F[_]: Sync](xa: Transactor[F])
     )
 
   def findMeta(id: BinaryId): OptionT[F, RFileMeta] =
-    OptionT(RFileMeta.findById(Ident.unsafe(id.id)).transact(xa))
+    OptionT(makeFileKey(id).flatMap(fileKey => RFileMeta.findById(fileKey).transact(xa)))
+
+  private def makeFileKey(binaryId: BinaryId): F[FileKey] =
+    Sync[F]
+      .pure(BinnyUtils.binaryIdToFileKey(binaryId).left.map(new IllegalStateException(_)))
+      .rethrow
 }
@@ -0,0 +1,59 @@
+/*
+ * Copyright 2020 Eike K. & Contributors
+ *
+ * SPDX-License-Identifier: AGPL-3.0-or-later
+ */
+
+package docspell.store.file
+
+import docspell.common
+import docspell.common._
+import docspell.files.TikaMimetype
+
+import binny._
+import scodec.bits.ByteVector
+
+private[store] object BinnyUtils {
+
+  def fileKeyToBinaryId(fk: FileKey): BinaryId =
+    BinaryId(s"${fk.collective.id}/${fk.category.id.id}/${fk.id.id}")
+
+  def binaryIdToFileKey(bid: BinaryId): Either[String, FileKey] =
+    bid.id.split('/').toList match {
+      case cId :: catId :: fId :: Nil =>
+        for {
+          coll <- Ident.fromString(cId)
+          cat <- FileCategory.fromString(catId)
+          file <- Ident.fromString(fId)
+        } yield common.FileKey(coll, cat, file)
+      case _ =>
+        Left(s"Invalid format for file-key: $bid")
+    }
+
+  def unsafeBinaryIdToFileKey(bid: BinaryId): FileKey =
+    binaryIdToFileKey(bid).fold(
+      err => throw new IllegalStateException(err),
+      identity
+    )
+
+  object LoggerAdapter {
+    def apply[F[_]](log: Logger[F]): binny.util.Logger[F] =
+      new binny.util.Logger[F] {
+        override def trace(msg: => String): F[Unit] = log.trace(msg)
+        override def debug(msg: => String): F[Unit] = log.debug(msg)
+        override def info(msg: => String): F[Unit] = log.info(msg)
+        override def warn(msg: => String): F[Unit] = log.warn(msg)
+        override def error(msg: => String): F[Unit] = log.error(msg)
+        override def error(ex: Throwable)(msg: => String): F[Unit] = log.error(ex)(msg)
+      }
+  }
+
+  object TikaContentTypeDetect extends ContentTypeDetect {
+    override def detect(data: ByteVector, hint: Hint): SimpleContentType =
+      SimpleContentType(
+        TikaMimetype
+          .detect(data, MimeTypeHint(hint.filename, hint.advertisedType))
+          .asString
+      )
+  }
+}
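BinnyUtils maps a FileKey onto binny's flat BinaryId as collective/category/id and parses it back, which is why the slash must not be a valid Ident character (see the Ident hunk above). A round-trip sketch with made-up values; since BinnyUtils is private[store], this only compiles inside the docspell.store package:

    import binny.BinaryId
    import docspell.common._
    import docspell.store.file.BinnyUtils

    val fk: FileKey =
      FileKey(Ident.unsafe("coll1"), FileCategory.PreviewImage, Ident.unsafe("f1"))

    // fileKeyToBinaryId yields BinaryId("coll1/previewimage/f1");
    // binaryIdToFileKey parses it back into the same key
    val roundTrips: Boolean =
      BinnyUtils.binaryIdToFileKey(BinnyUtils.fileKeyToBinaryId(fk)) == Right(fk)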
@@ -0,0 +1,19 @@
+/*
+ * Copyright 2020 Eike K. & Contributors
+ *
+ * SPDX-License-Identifier: AGPL-3.0-or-later
+ */
+
+package docspell.store.file
+
+import docspell.common._
+
+import scodec.bits.ByteVector
+
+final case class FileMetadata(
+    id: FileKey,
+    created: Timestamp,
+    mimetype: MimeType,
+    length: ByteSize,
+    checksum: ByteVector
+)
@@ -0,0 +1,50 @@
+/*
+ * Copyright 2020 Eike K. & Contributors
+ *
+ * SPDX-License-Identifier: AGPL-3.0-or-later
+ */
+
+package docspell.store.file
+
+import javax.sql.DataSource
+
+import cats.effect._
+import fs2._
+
+import docspell.common._
+
+import binny.BinaryId
+import binny.jdbc.{GenericJdbcStore, JdbcStoreConfig}
+import doobie.Transactor
+
+trait FileRepository[F[_]] {
+  def getBytes(key: FileKey): Stream[F, Byte]
+
+  def findMeta(key: FileKey): F[Option[FileMetadata]]
+
+  def delete(key: FileKey): F[Unit]
+
+  def save(
+      collective: Ident,
+      category: FileCategory,
+      hint: MimeTypeHint
+  ): Pipe[F, Byte, FileKey]
+}
+
+object FileRepository {
+  private[this] val logger = org.log4s.getLogger
+
+  def genericJDBC[F[_]: Sync](
+      xa: Transactor[F],
+      ds: DataSource,
+      chunkSize: Int
+  ): FileRepository[F] = {
+    val attrStore = new AttributeStore[F](xa)
+    val cfg = JdbcStoreConfig("filechunk", chunkSize, BinnyUtils.TikaContentTypeDetect)
+    val log = Logger.log4s[F](logger)
+    val binStore = GenericJdbcStore[F](ds, BinnyUtils.LoggerAdapter(log), cfg, attrStore)
+    val keyFun: FileKey => BinaryId = BinnyUtils.fileKeyToBinaryId
+
+    new FileRepositoryImpl[F](binStore, attrStore, keyFun)
+  }
+}
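A usage sketch for the new trait, assuming a repository created via FileRepository.genericJDBC(xa, ds, chunkSize); the function name and sample bytes are hypothetical:

    import cats.effect.IO
    import fs2.Stream
    import docspell.common._
    import docspell.store.file.{FileMetadata, FileRepository}

    // save some bytes under a collective/category, then look up the stored metadata
    def saveAndInspect(repo: FileRepository[IO], coll: Ident): IO[Option[FileMetadata]] =
      Stream
        .emits("hello".getBytes("UTF-8").toSeq)
        .through(repo.save(coll, FileCategory.AttachmentSource, MimeTypeHint.none))
        .compile
        .lastOrError
        .flatMap(key => repo.findMeta(key))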
@@ -0,0 +1,60 @@
+/*
+ * Copyright 2020 Eike K. & Contributors
+ *
+ * SPDX-License-Identifier: AGPL-3.0-or-later
+ */
+
+package docspell.store.file
+
+import cats.data.OptionT
+import cats.effect.Sync
+import cats.implicits._
+import fs2.{Pipe, Stream}
+
+import docspell.common._
+
+import binny._
+
+final class FileRepositoryImpl[F[_]: Sync](
+    bs: BinaryStore[F],
+    attrStore: AttributeStore[F],
+    keyFun: FileKey => BinaryId
+) extends FileRepository[F] {
+
+  def find(key: FileKey): OptionT[F, Stream[F, Byte]] =
+    bs.findBinary(keyFun(key), ByteRange.All)
+
+  def getBytes(key: FileKey): Stream[F, Byte] =
+    Stream.eval(find(key).value).unNoneTerminate.flatMap(identity)
+
+  def findMeta(key: FileKey): F[Option[FileMetadata]] =
+    attrStore
+      .findMeta(keyFun(key))
+      .map(rfm =>
+        FileMetadata(rfm.id, rfm.created, rfm.mimetype, rfm.length, rfm.checksum)
+      )
+      .value
+
+  def delete(key: FileKey): F[Unit] =
+    bs.delete(keyFun(key))
+
+  def save(
+      collective: Ident,
+      category: FileCategory,
+      hint: MimeTypeHint
+  ): Pipe[F, Byte, FileKey] = {
+    val fhint = Hint(hint.filename, hint.advertised)
+    in =>
+      Stream
+        .eval(randomKey(collective, category))
+        .flatMap(fkey =>
+          in.through(bs.insertWith(keyFun(fkey), fhint)) ++ Stream.emit(fkey)
+        )
+  }
+
+  def randomKey(
+      collective: Ident,
+      category: FileCategory
+  ): F[FileKey] =
+    BinaryId.random[F].map(bid => FileKey(collective, category, Ident.unsafe(bid.id)))
+}
@@ -1,91 +0,0 @@
-/*
- * Copyright 2020 Eike K. & Contributors
- *
- * SPDX-License-Identifier: AGPL-3.0-or-later
- */
-
-package docspell.store.file
-
-import javax.sql.DataSource
-
-import cats.data.OptionT
-import cats.effect._
-import fs2.{Pipe, Stream}
-
-import docspell.common._
-import docspell.files.TikaMimetype
-import docspell.store.records.RFileMeta
-
-import binny._
-import binny.jdbc.{GenericJdbcStore, JdbcStoreConfig}
-import doobie._
-import scodec.bits.ByteVector
-
-trait FileStore[F[_]] {
-
-  def find(id: Ident): OptionT[F, Stream[F, Byte]]
-
-  def getBytes(id: Ident): Stream[F, Byte]
-
-  def findMeta(id: Ident): OptionT[F, RFileMeta]
-
-  def delete(id: Ident): F[Unit]
-
-  def save(hint: MimeTypeHint): Pipe[F, Byte, Ident]
-}
-
-object FileStore {
-  private[this] val logger = org.log4s.getLogger
-
-  def apply[F[_]: Sync](
-      xa: Transactor[F],
-      ds: DataSource,
-      chunkSize: Int
-  ): FileStore[F] = {
-    val attrStore = new AttributeStore[F](xa)
-    val cfg = JdbcStoreConfig("filechunk", chunkSize, TikaContentTypeDetect)
-    val log = Logger.log4s[F](logger)
-    val binStore = GenericJdbcStore[F](ds, LoggerAdapter(log), cfg, attrStore)
-    new Impl[F](binStore, attrStore)
-  }
-
-  final private class Impl[F[_]](bs: BinaryStore[F], attrStore: AttributeStore[F])
-      extends FileStore[F] {
-    def find(id: Ident): OptionT[F, Stream[F, Byte]] =
-      bs.findBinary(BinaryId(id.id), ByteRange.All)
-
-    def getBytes(id: Ident): Stream[F, Byte] =
-      Stream.eval(find(id).value).unNoneTerminate.flatMap(identity)
-
-    def findMeta(id: Ident): OptionT[F, RFileMeta] =
-      attrStore.findMeta(BinaryId(id.id))
-
-    def delete(id: Ident): F[Unit] =
-      bs.delete(BinaryId(id.id))
-
-    def save(hint: MimeTypeHint): Pipe[F, Byte, Ident] =
-      bs.insert(Hint(hint.filename, hint.advertised))
-        .andThen(_.map(bid => Ident.unsafe(bid.id)))
-  }
-
-  private object LoggerAdapter {
-    def apply[F[_]](log: Logger[F]): binny.util.Logger[F] =
-      new binny.util.Logger[F] {
-        override def trace(msg: => String): F[Unit] = log.trace(msg)
-        override def debug(msg: => String): F[Unit] = log.debug(msg)
-        override def info(msg: => String): F[Unit] = log.info(msg)
-        override def warn(msg: => String): F[Unit] = log.warn(msg)
-        override def error(msg: => String): F[Unit] = log.error(msg)
-        override def error(ex: Throwable)(msg: => String): F[Unit] = log.error(ex)(msg)
-      }
-  }
-
-  private object TikaContentTypeDetect extends ContentTypeDetect {
-    override def detect(data: ByteVector, hint: Hint): SimpleContentType =
-      SimpleContentType(
-        TikaMimetype
-          .detect(data, MimeTypeHint(hint.filename, hint.advertisedType))
-          .asString
-      )
-  }
-}
@@ -14,8 +14,10 @@ import docspell.common.syntax.all._
 import docspell.jsonminiq.JsonMiniQuery
 import docspell.notification.api.{ChannelType, EventType}
 import docspell.query.{ItemQuery, ItemQueryParser}
+import docspell.store.file.BinnyUtils
 import docspell.totp.Key
 
+import binny.BinaryId
 import com.github.eikek.calev.CalEvent
 import doobie._
 import doobie.implicits.legacy.instant._
@@ -27,7 +29,7 @@ import scodec.bits.ByteVector
 
 trait DoobieMeta extends EmilDoobieMeta {
 
-  implicit val sqlLogging = LogHandler {
+  implicit val sqlLogging: LogHandler = LogHandler {
     case e @ Success(_, _, _, _) =>
       DoobieMeta.logger.trace("SQL " + e)
     case e =>
@@ -39,58 +41,64 @@ trait DoobieMeta extends EmilDoobieMeta {
       e.apply(a).noSpaces
     )
 
+  implicit val metaBinaryId: Meta[BinaryId] =
+    Meta[String].timap(BinaryId.apply)(_.id)
+
+  implicit val metaFileKey: Meta[FileKey] =
+    Meta[BinaryId].timap(BinnyUtils.unsafeBinaryIdToFileKey)(BinnyUtils.fileKeyToBinaryId)
+
   implicit val metaAccountSource: Meta[AccountSource] =
-    Meta[String].imap(AccountSource.unsafeFromString)(_.name)
+    Meta[String].timap(AccountSource.unsafeFromString)(_.name)
 
   implicit val metaDuration: Meta[Duration] =
-    Meta[Long].imap(Duration.millis)(_.millis)
+    Meta[Long].timap(Duration.millis)(_.millis)
 
   implicit val metaCollectiveState: Meta[CollectiveState] =
-    Meta[String].imap(CollectiveState.unsafe)(CollectiveState.asString)
+    Meta[String].timap(CollectiveState.unsafe)(CollectiveState.asString)
 
   implicit val metaUserState: Meta[UserState] =
-    Meta[String].imap(UserState.unsafe)(UserState.asString)
+    Meta[String].timap(UserState.unsafe)(UserState.asString)
 
   implicit val metaPassword: Meta[Password] =
-    Meta[String].imap(Password(_))(_.pass)
+    Meta[String].timap(Password(_))(_.pass)
 
   implicit val metaIdent: Meta[Ident] =
-    Meta[String].imap(Ident.unsafe)(_.id)
+    Meta[String].timap(Ident.unsafe)(_.id)
 
   implicit val metaContactKind: Meta[ContactKind] =
-    Meta[String].imap(ContactKind.unsafe)(_.asString)
+    Meta[String].timap(ContactKind.unsafe)(_.asString)
 
   implicit val metaTimestamp: Meta[Timestamp] =
-    Meta[Instant].imap(Timestamp(_))(_.value)
+    Meta[Instant].timap(Timestamp(_))(_.value)
 
   implicit val metaJobState: Meta[JobState] =
-    Meta[String].imap(JobState.unsafe)(_.name)
+    Meta[String].timap(JobState.unsafe)(_.name)
 
   implicit val metaDirection: Meta[Direction] =
-    Meta[Boolean].imap(flag =>
+    Meta[Boolean].timap(flag =>
       if (flag) Direction.Incoming: Direction else Direction.Outgoing: Direction
     )(d => Direction.isIncoming(d))
 
   implicit val metaPriority: Meta[Priority] =
-    Meta[Int].imap(Priority.fromInt)(Priority.toInt)
+    Meta[Int].timap(Priority.fromInt)(Priority.toInt)
 
   implicit val metaLogLevel: Meta[LogLevel] =
-    Meta[String].imap(LogLevel.unsafeString)(_.name)
+    Meta[String].timap(LogLevel.unsafeString)(_.name)
 
   implicit val metaLenientUri: Meta[LenientUri] =
-    Meta[String].imap(LenientUri.unsafe)(_.asString)
+    Meta[String].timap(LenientUri.unsafe)(_.asString)
 
   implicit val metaNodeType: Meta[NodeType] =
-    Meta[String].imap(NodeType.unsafe)(_.name)
+    Meta[String].timap(NodeType.unsafe)(_.name)
 
   implicit val metaLocalDate: Meta[LocalDate] =
-    Meta[String].imap(str => LocalDate.parse(str))(_.format(DateTimeFormatter.ISO_DATE))
+    Meta[String].timap(str => LocalDate.parse(str))(_.format(DateTimeFormatter.ISO_DATE))
 
   implicit val metaItemState: Meta[ItemState] =
-    Meta[String].imap(ItemState.unsafe)(_.name)
+    Meta[String].timap(ItemState.unsafe)(_.name)
 
   implicit val metNerTag: Meta[NerTag] =
-    Meta[String].imap(NerTag.unsafe)(_.name)
+    Meta[String].timap(NerTag.unsafe)(_.name)
 
   implicit val metaNerLabel: Meta[NerLabel] =
     jsonMeta[NerLabel]
@@ -108,7 +116,7 @@ trait DoobieMeta extends EmilDoobieMeta {
     jsonMeta[List[IdRef]]
 
   implicit val metaLanguage: Meta[Language] =
-    Meta[String].imap(Language.unsafe)(_.iso3)
+    Meta[String].timap(Language.unsafe)(_.iso3)
 
   implicit val metaCalEvent: Meta[CalEvent] =
     Meta[String].timap(CalEvent.unsafe)(_.asString)
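With the two new Meta instances a FileKey is persisted in a single string column, reusing the same encoding binny uses for its BinaryId. A sketch of the stored value (illustrative; BinnyUtils is package-private to docspell.store, so this is only meant to show the column format):

    import docspell.common._
    import docspell.store.file.BinnyUtils

    val fk: FileKey =
      FileKey(Ident.unsafe("coll1"), FileCategory.AttachmentConvert, Ident.unsafe("x9"))

    // the column value written through Meta[FileKey] via Meta[BinaryId]
    val column: String = BinnyUtils.fileKeyToBinaryId(fk).id // "coll1/attachmentconvert/x9"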
@@ -11,7 +11,7 @@ import cats.effect.Async
 import cats.implicits._
 import cats.~>
 
-import docspell.store.file.FileStore
+import docspell.store.file.FileRepository
 import docspell.store.migrate.FlywayMigrate
 import docspell.store.{AddResult, JdbcConfig, Store}
 
@@ -19,7 +19,7 @@ import doobie._
 import doobie.implicits._
 
 final class StoreImpl[F[_]: Async](
-    val fileStore: FileStore[F],
+    val fileRepo: FileRepository[F],
     jdbc: JdbcConfig,
     xa: Transactor[F]
 ) extends Store[F] {
@@ -30,10 +30,10 @@ final class StoreImpl[F[_]: Async](
   def migrate: F[Int] =
     FlywayMigrate.run[F](jdbc).map(_.migrationsExecuted)
 
-  def transact[A](prg: doobie.ConnectionIO[A]): F[A] =
+  def transact[A](prg: ConnectionIO[A]): F[A] =
     prg.transact(xa)
 
-  def transact[A](prg: fs2.Stream[doobie.ConnectionIO, A]): fs2.Stream[F, A] =
+  def transact[A](prg: fs2.Stream[ConnectionIO, A]): fs2.Stream[F, A] =
     prg.transact(xa)
 
   def add(insert: ConnectionIO[Int], exists: ConnectionIO[Boolean]): F[AddResult] =
|
@@ -40,7 +40,7 @@ object QAttachment {
       .evalSeq(store.transact(findPreview))
       .map(_.fileId)
       .evalTap(_ => store.transact(RAttachmentPreview.delete(attachId)))
-      .evalMap(store.fileStore.delete)
+      .evalMap(store.fileRepo.delete)
       .map(_ => 1)
       .compile
       .foldMonoid
@@ -68,7 +68,7 @@ object QAttachment {
       f <-
         Stream
           .emits(files._1)
-          .evalMap(store.fileStore.delete)
+          .evalMap(store.fileRepo.delete)
           .map(_ => 1)
           .compile
           .foldMonoid
@@ -91,7 +91,7 @@ object QAttachment {
       f <-
         Stream
           .emits(ra.fileId +: (s.map(_.fileId).toSeq ++ p.map(_.fileId).toSeq))
-          .evalMap(store.fileStore.delete)
+          .evalMap(store.fileRepo.delete)
           .map(_ => 1)
           .compile
           .foldMonoid
@@ -104,7 +104,7 @@ object QAttachment {
       _ <- OptionT.liftF(
         Stream
           .emit(aa.fileId)
-          .evalMap(store.fileStore.delete)
+          .evalMap(store.fileRepo.delete)
           .compile
           .drain
       )
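Note: every hunk in this file uses the same fs2 idiom to delete a batch of files and report how many were removed: run the effect per element, map each success to `1`, and sum with the `Int` monoid. The pattern in isolation, with a stand-in delete effect:

import cats.effect.IO
import fs2.Stream

// `delete` stands in for store.fileRepo.delete; the result counts how
// many deletions were executed.
def deleteAll[A](ids: List[A], delete: A => IO[Unit]): IO[Int] =
  Stream
    .emits(ids)
    .evalMap(delete)
    .map(_ => 1)
    .compile
    .foldMonoid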
@@ -15,7 +15,7 @@ import cats.implicits._
 import fs2.Stream

 import docspell.common.syntax.all._
-import docspell.common.{IdRef, _}
+import docspell.common.{FileKey, IdRef, _}
 import docspell.query.ItemQuery
 import docspell.store.Store
 import docspell.store.qb.DSL._
@@ -470,7 +470,7 @@ object QItem {
   } yield tn + rn + n + mn + cf + im

   private def findByFileIdsQuery(
-      fileMetaIds: Nel[Ident],
+      fileMetaIds: Nel[FileKey],
       states: Option[Nel[ItemState]]
   ): Select.SimpleSelect = {
     val i = RItem.as("i")
@@ -490,7 +490,7 @@ object QItem {
     ).distinct
   }

-  def findOneByFileIds(fileMetaIds: Seq[Ident]): ConnectionIO[Option[RItem]] =
+  def findOneByFileIds(fileMetaIds: Seq[FileKey]): ConnectionIO[Option[RItem]] =
     Nel.fromList(fileMetaIds.toList) match {
       case Some(nel) =>
         findByFileIdsQuery(nel, None).limit(1).build.query[RItem].option
@@ -499,7 +499,7 @@ object QItem {
     }

   def findByFileIds(
-      fileMetaIds: Seq[Ident],
+      fileMetaIds: Seq[FileKey],
       states: Nel[ItemState]
   ): ConnectionIO[Vector[RItem]] =
     Nel.fromList(fileMetaIds.toList) match {
@@ -512,7 +512,7 @@ object QItem {
   def findByChecksum(
       checksum: String,
       collective: Ident,
-      excludeFileMeta: Set[Ident]
+      excludeFileMeta: Set[FileKey]
   ): ConnectionIO[Vector[RItem]] = {
     val qq = findByChecksumQuery(checksum, collective, excludeFileMeta).build
     logger.debug(s"FindByChecksum: $qq")
@@ -522,7 +522,7 @@ object QItem {
   def findByChecksumQuery(
       checksum: String,
       collective: Ident,
-      excludeFileMeta: Set[Ident]
+      excludeFileMeta: Set[FileKey]
   ): Select = {
     val m1 = RFileMeta.as("m1")
     val m2 = RFileMeta.as("m2")
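Note: this file is where the new `FileKey` begins to replace the bare `Ident` as the identifier for stored files. `FileKey`'s definition is not part of this excerpt; judging from how it threads through these signatures, a plausible shape is a small composite key, for example:

// Hypothetical reconstruction -- FileKey (and FileCategory) live in
// docspell.common and are not shown in this diff.
final case class FileKey(collective: Ident, category: FileCategory, id: Ident)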
@@ -10,7 +10,7 @@ import cats.data.NonEmptyList
 import cats.implicits._
 import fs2.Stream

-import docspell.common._
+import docspell.common.{FileKey, _}
 import docspell.store.qb.DSL._
 import docspell.store.qb._

@@ -20,7 +20,7 @@ import doobie.implicits._
 case class RAttachment(
     id: Ident,
     itemId: Ident,
-    fileId: Ident,
+    fileId: FileKey,
     position: Int,
     created: Timestamp,
     name: Option[String]
@@ -32,7 +32,7 @@ object RAttachment {

     val id = Column[Ident]("attachid", this)
     val itemId = Column[Ident]("itemid", this)
-    val fileId = Column[Ident]("filemetaid", this)
+    val fileId = Column[FileKey]("filemetaid", this)
     val position = Column[Int]("position", this)
     val created = Column[Timestamp]("created", this)
     val name = Column[String]("name", this)
@@ -47,7 +47,7 @@ object RAttachment {
     DML.insert(
       T,
       T.all,
-      fr"${v.id},${v.itemId},${v.fileId.id},${v.position},${v.created},${v.name}"
+      fr"${v.id},${v.itemId},${v.fileId},${v.position},${v.created},${v.name}"
     )

   def decPositions(iId: Ident, lowerBound: Int, upperBound: Int): ConnectionIO[Int] =
@@ -77,7 +77,7 @@ object RAttachment {

   def updateFileIdAndName(
       attachId: Ident,
-      fId: Ident,
+      fId: FileKey,
       fname: Option[String]
   ): ConnectionIO[Int] =
     DML.update(
@@ -88,7 +88,7 @@ object RAttachment {

   def updateFileId(
       attachId: Ident,
-      fId: Ident
+      fId: FileKey
   ): ConnectionIO[Int] =
     DML.update(
       T,
@@ -182,7 +182,7 @@ object RAttachment {
   def findByItemCollectiveSource(
       id: Ident,
       coll: Ident,
-      fileIds: NonEmptyList[Ident]
+      fileIds: NonEmptyList[FileKey]
   ): ConnectionIO[Vector[RAttachment]] = {
     val i = RItem.as("i")
     val a = RAttachment.as("a")
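Note the insert fragment above: `${v.fileId.id}` becomes `${v.fileId}`, which only compiles if doobie can encode a `FileKey` directly, i.e. a `Put[FileKey]` (usually via a `Meta`) must be in scope. A sketch of what such an instance could look like; the `unsafeFromString`/`asString` codec pair is assumed, not shown in the diff:

import doobie.Meta

// Assumed helpers: the real codec is defined alongside docspell's other
// DoobieMeta instances, outside this excerpt.
implicit val fileKeyMeta: Meta[FileKey] =
  Meta[String].timap(FileKey.unsafeFromString)(_.asString)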
@@ -8,7 +8,7 @@ package docspell.store.records

 import cats.data.NonEmptyList

-import docspell.common._
+import docspell.common.{FileKey, _}
 import docspell.store.qb.DSL._
 import docspell.store.qb.TableDef
 import docspell.store.qb._
@@ -21,7 +21,7 @@ import doobie.implicits._
  */
 case class RAttachmentArchive(
     id: Ident, // same as RAttachment.id
-    fileId: Ident,
+    fileId: FileKey,
     name: Option[String],
     messageId: Option[String],
     created: Timestamp
@@ -32,7 +32,7 @@ object RAttachmentArchive {
     val tableName = "attachment_archive"

     val id = Column[Ident]("id", this)
-    val fileId = Column[Ident]("file_id", this)
+    val fileId = Column[FileKey]("file_id", this)
     val name = Column[String]("filename", this)
     val messageId = Column[String]("message_id", this)
     val created = Column[Timestamp]("created", this)
@@ -59,7 +59,7 @@ object RAttachmentArchive {
   def delete(attachId: Ident): ConnectionIO[Int] =
     DML.delete(T, T.id === attachId)

-  def deleteAll(fId: Ident): ConnectionIO[Int] =
+  def deleteAll(fId: FileKey): ConnectionIO[Int] =
     DML.delete(T, T.fileId === fId)

   def findByIdAndCollective(
@@ -8,7 +8,7 @@ package docspell.store.records

 import cats.data.NonEmptyList

-import docspell.common._
+import docspell.common.{FileKey, _}
 import docspell.store.qb.DSL._
 import docspell.store.qb._

@@ -20,7 +20,7 @@ import doobie.implicits._
  */
 case class RAttachmentPreview(
     id: Ident, // same as RAttachment.id
-    fileId: Ident,
+    fileId: FileKey,
     name: Option[String],
     created: Timestamp
 )
@@ -30,7 +30,7 @@ object RAttachmentPreview {
     val tableName = "attachment_preview"

     val id = Column[Ident]("id", this)
-    val fileId = Column[Ident]("file_id", this)
+    val fileId = Column[FileKey]("file_id", this)
     val name = Column[String]("filename", this)
     val created = Column[Timestamp]("created", this)

@@ -8,7 +8,7 @@ package docspell.store.records

 import cats.data.NonEmptyList

-import docspell.common._
+import docspell.common.{FileKey, _}
 import docspell.store.qb.DSL._
 import docspell.store.qb._

@@ -20,7 +20,7 @@ import doobie.implicits._
  */
 case class RAttachmentSource(
     id: Ident, // same as RAttachment.id
-    fileId: Ident,
+    fileId: FileKey,
     name: Option[String],
     created: Timestamp
 )
@@ -30,7 +30,7 @@ object RAttachmentSource {
     val tableName = "attachment_source"

     val id = Column[Ident]("id", this)
-    val fileId = Column[Ident]("file_id", this)
+    val fileId = Column[FileKey]("file_id", this)
     val name = Column[String]("filename", this)
     val created = Column[Timestamp]("created", this)

@@ -50,7 +50,7 @@ object RAttachmentSource {
   def findById(attachId: Ident): ConnectionIO[Option[RAttachmentSource]] =
     run(select(T.all), from(T), T.id === attachId).query[RAttachmentSource].option

-  def isSameFile(attachId: Ident, file: Ident): ConnectionIO[Boolean] =
+  def isSameFile(attachId: Ident, file: FileKey): ConnectionIO[Boolean] =
     Select(count(T.id).s, from(T), T.id === attachId && T.fileId === file).build
       .query[Int]
       .unique
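Note: `isSameFile` answers a yes/no question with a COUNT query built through docspell's query DSL. Using the table and column names declared above, a rough plain-doobie equivalent (with the `FileKey` rendered as its string column value) would be:

import doobie._
import doobie.implicits._

// Counts matching rows and interprets count > 0 as "same file".
def isSameFile(attachId: String, fileKey: String): ConnectionIO[Boolean] =
  sql"select count(id) from attachment_source where id = $attachId and file_id = $fileKey"
    .query[Int]
    .unique
    .map(_ > 0)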
@@ -21,7 +21,7 @@ final case class RClassifierModel(
     id: Ident,
     cid: Ident,
     name: String,
-    fileId: Ident,
+    fileId: FileKey,
     created: Timestamp
 ) {}

@@ -30,7 +30,7 @@ object RClassifierModel {
   def createNew[F[_]: Sync](
       cid: Ident,
       name: String,
-      fileId: Ident
+      fileId: FileKey
   ): F[RClassifierModel] =
     for {
       id <- Ident.randomId[F]
@@ -43,7 +43,7 @@ object RClassifierModel {
     val id = Column[Ident]("id", this)
     val cid = Column[Ident]("cid", this)
     val name = Column[String]("name", this)
-    val fileId = Column[Ident]("file_id", this)
+    val fileId = Column[FileKey]("file_id", this)
     val created = Column[Timestamp]("created", this)

     val all = NonEmptyList.of[Column[_]](id, cid, name, fileId, created)
@@ -61,7 +61,7 @@ object RClassifierModel {
       fr"${v.id},${v.cid},${v.name},${v.fileId},${v.created}"
     )

-  def updateFile(coll: Ident, name: String, fid: Ident): ConnectionIO[Int] =
+  def updateFile(coll: Ident, name: String, fid: FileKey): ConnectionIO[Int] =
     for {
       now <- Timestamp.current[ConnectionIO]
       n <- DML.update(
@@ -9,7 +9,7 @@ package docspell.store.records
 import cats.data.NonEmptyList
 import cats.implicits._

-import docspell.common._
+import docspell.common.{FileKey, _}
 import docspell.store.qb.DSL._
 import docspell.store.qb._

@@ -18,7 +18,7 @@ import doobie.implicits._
 import scodec.bits.ByteVector

 final case class RFileMeta(
-    id: Ident,
+    id: FileKey,
     created: Timestamp,
     mimetype: MimeType,
     length: ByteSize,
@@ -29,7 +29,7 @@ object RFileMeta {
   final case class Table(alias: Option[String]) extends TableDef {
     val tableName = "filemeta"

-    val id = Column[Ident]("file_id", this)
+    val id = Column[FileKey]("file_id", this)
     val timestamp = Column[Timestamp]("created", this)
     val mimetype = Column[MimeType]("mimetype", this)
     val length = Column[ByteSize]("length", this)
@@ -47,10 +47,10 @@ object RFileMeta {
   def insert(r: RFileMeta): ConnectionIO[Int] =
     DML.insert(T, T.all, fr"${r.id},${r.created},${r.mimetype},${r.length},${r.checksum}")

-  def findById(fid: Ident): ConnectionIO[Option[RFileMeta]] =
+  def findById(fid: FileKey): ConnectionIO[Option[RFileMeta]] =
     run(select(T.all), from(T), T.id === fid).query[RFileMeta].option

-  def findByIds(ids: List[Ident]): ConnectionIO[Vector[RFileMeta]] =
+  def findByIds(ids: List[FileKey]): ConnectionIO[Vector[RFileMeta]] =
     NonEmptyList.fromList(ids) match {
       case Some(nel) =>
         run(select(T.all), from(T), T.id.in(nel)).query[RFileMeta].to[Vector]
@@ -58,11 +58,11 @@ object RFileMeta {
         Vector.empty[RFileMeta].pure[ConnectionIO]
     }

-  def findMime(fid: Ident): ConnectionIO[Option[MimeType]] =
+  def findMime(fid: FileKey): ConnectionIO[Option[MimeType]] =
     run(select(T.mimetype), from(T), T.id === fid)
       .query[MimeType]
       .option

-  def delete(id: Ident): ConnectionIO[Int] =
+  def delete(id: FileKey): ConnectionIO[Int] =
     DML.delete(T, T.id === id)
 }
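Note: `findByIds` shows the usual doobie guard for `IN` queries: `in` requires a non-empty list, so empty input never reaches SQL and short-circuits to a pure empty result. The same pattern without docspell's DSL:

import cats.data.NonEmptyList
import cats.implicits._
import doobie._
import doobie.implicits._

// SQL has no empty IN () clause, so the empty case returns purely.
def selectByIds(ids: List[String]): ConnectionIO[Vector[String]] =
  NonEmptyList.fromList(ids) match {
    case Some(nel) =>
      (fr"select file_id from filemeta where" ++ Fragments.in(fr"file_id", nel))
        .query[String]
        .to[Vector]
    case None =>
      Vector.empty[String].pure[ConnectionIO]
  }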
@@ -11,7 +11,7 @@ import javax.sql.DataSource
 import cats.effect._

 import docspell.common.LenientUri
-import docspell.store.file.FileStore
+import docspell.store.file.FileRepository
 import docspell.store.impl.StoreImpl
 import docspell.store.migrate.FlywayMigrate

@@ -67,7 +67,8 @@ object StoreFixture {
     for {
       ds <- dataSource(jdbc)
       xa <- makeXA(ds)
-      store = new StoreImpl[IO](FileStore[IO](xa, ds, 64 * 1024), jdbc, xa)
+      fr = FileRepository.genericJDBC[IO](xa, ds, 64 * 1024)
+      store = new StoreImpl[IO](fr, jdbc, xa)
       _ <- Resource.eval(store.migrate)
     } yield store
   }
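Note: the `64 * 1024` passed to `FileRepository.genericJDBC` is presumably the chunk size, in bytes, used when streaming binaries to and from the JDBC-backed store. Fixed-size chunking is the standard fs2 idiom; for comparison, reading a file the same way (fs2 3.x API):

import cats.effect.IO
import fs2.io.file.{Files, Flags, Path}

// Illustrative only: streams a file in 64 KiB chunks, analogous to how
// the repository would chunk database blobs.
def readChunked(p: Path): fs2.Stream[IO, Byte] =
  Files[IO].readAll(p, 64 * 1024, Flags.Read)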
@@ -282,7 +282,8 @@ object Dependencies {

   val binny = Seq(
     "com.github.eikek" %% "binny-core" % BinnyVersion,
-    "com.github.eikek" %% "binny-jdbc" % BinnyVersion
+    "com.github.eikek" %% "binny-jdbc" % BinnyVersion,
+    "com.github.eikek" %% "binny-minio" % BinnyVersion
   )

   // https://github.com/flyway/flyway
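Note: binny is the binary-storage library underneath docspell's file layer. Adding `binny-minio` next to `binny-jdbc` pulls in its MinIO (S3-compatible) backend, which fits the new `FileRepository` abstraction: the storage backend behind the same file API becomes pluggable rather than hard-wired to JDBC.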