Mirror of https://github.com/TheAnachronism/docspell.git (synced 2025-06-22 02:18:26 +00:00)
Update binny to 0.6.0
@@ -60,7 +60,7 @@ object Store {
         ds.setDriverClassName(jdbc.dbms.driverClass)
       }
       xa = HikariTransactor(ds, connectEC)
-      fr = FileRepository.apply(xa, ds, fileRepoConfig, true)
+      fr = FileRepository(xa, ds, fileRepoConfig, true)
       st = new StoreImpl[F](fr, jdbc, schemaCfg, ds, xa)
       _ <- Resource.eval(st.migrate)
     } yield st
@@ -18,71 +18,52 @@ import binny._
 import doobie._
 import doobie.implicits._
 
-private[file] trait AttributeStore[F[_]] extends BinaryAttributeStore[F] {
-  def findMeta(id: BinaryId): OptionT[F, RFileMeta]
+trait AttributeStore[F[_]] {
+  def saveAttr(key: FileKey, attrs: BinaryAttributes): F[Unit]
+
+  def deleteAttr(key: FileKey): F[Boolean]
+
+  def findMeta(key: FileKey): OptionT[F, RFileMeta]
 }
 
 private[file] object AttributeStore {
   def empty[F[_]: Applicative]: AttributeStore[F] =
     new AttributeStore[F] {
-      val delegate = BinaryAttributeStore.empty[F]
+      override def saveAttr(key: FileKey, attrs: BinaryAttributes) = ().pure[F]
 
-      def findMeta(id: BinaryId) =
-        OptionT.none
+      override def deleteAttr(key: FileKey) = false.pure[F]
 
-      def saveAttr(id: BinaryId, attrs: F[BinaryAttributes]) =
-        delegate.saveAttr(id, attrs)
-
-      def deleteAttr(id: BinaryId) =
-        delegate.deleteAttr(id)
-
-      def findAttr(id: BinaryId) =
-        delegate.findAttr(id)
+      override def findMeta(key: FileKey) = OptionT.none[F, RFileMeta]
     }
 
   def apply[F[_]: Sync](xa: Transactor[F]): AttributeStore[F] =
    new Impl[F](xa)
 
  final private class Impl[F[_]: Sync](xa: Transactor[F]) extends AttributeStore[F] {
-    def saveAttr(id: BinaryId, attrs: F[BinaryAttributes]): F[Unit] =
+    def saveAttr(key: FileKey, attrs: BinaryAttributes): F[Unit] =
       for {
         now <- Timestamp.current[F]
-        a <- attrs
-        fileKey <- makeFileKey(id)
         fm = RFileMeta(
-          fileKey,
+          key,
           now,
-          MimeType.parse(a.contentType.contentType).getOrElse(MimeType.octetStream),
-          ByteSize(a.length),
-          a.sha256
+          MimeType.parse(attrs.contentType.contentType).getOrElse(MimeType.octetStream),
+          ByteSize(attrs.length),
+          attrs.sha256
         )
         _ <- RFileMeta.insert(fm).transact(xa)
       } yield ()
 
-    def deleteAttr(id: BinaryId): F[Boolean] =
-      makeFileKey(id).flatMap(fileKey =>
-        RFileMeta.delete(fileKey).transact(xa).map(_ > 0)
-      )
+    def deleteAttr(key: FileKey): F[Boolean] =
+      RFileMeta.delete(key).transact(xa).map(_ > 0)
 
-    def findAttr(id: BinaryId): OptionT[F, BinaryAttributes] =
-      findMeta(id).map(fm =>
-        BinaryAttributes(
-          fm.checksum,
-          SimpleContentType(fm.mimetype.asString),
-          fm.length.bytes
-        )
-      )
+    def findMeta(key: FileKey): OptionT[F, RFileMeta] =
+      OptionT(RFileMeta.findById(key).transact(xa))
 
-    def findMeta(id: BinaryId): OptionT[F, RFileMeta] =
-      OptionT(
-        makeFileKey(id).flatMap(fileKey => RFileMeta.findById(fileKey).transact(xa))
-      )
-
-    private def makeFileKey(binaryId: BinaryId): F[FileKey] =
-      Sync[F]
-        .pure(
-          BinnyUtils.binaryIdToFileKey(binaryId).left.map(new IllegalStateException(_))
-        )
-        .rethrow
+    // private def makeFileKey(binaryId: BinaryId): F[FileKey] =
+    //   Sync[F]
+    //     .pure(
+    //       BinnyUtils.binaryIdToFileKey(binaryId).left.map(new IllegalStateException(_))
+    //     )
+    //     .rethrow
   }
 }
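Note on the rewritten trait: AttributeStore is now docspell's own abstraction keyed by FileKey, rather than an implementation of binny's BinaryAttributeStore keyed by BinaryId, and it takes an already computed BinaryAttributes value instead of an F[BinaryAttributes]. A minimal usage sketch, not part of the commit; it assumes the caller sits in the same package so AttributeStore and RFileMeta are in scope, and store, fileKey, sha256 and length are placeholder values:

import binny.{BinaryAttributes, SimpleContentType}
import cats.effect.IO
import scodec.bits.ByteVector

import docspell.common._

// Persist metadata for an already stored binary, then read it back as RFileMeta.
def recordAttrs(
    store: AttributeStore[IO],
    fileKey: FileKey,
    sha256: ByteVector,
    length: Long
): IO[Option[RFileMeta]] = {
  val attrs = BinaryAttributes(
    sha256,                               // checksum of the stored bytes
    SimpleContentType("application/pdf"), // detected content type (placeholder)
    length                                // size in bytes
  )
  store.saveAttr(fileKey, attrs) *> store.findMeta(fileKey).value
}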
@@ -16,7 +16,7 @@ import docspell.files.TikaMimetype
 import docspell.logging.Logger
 
 import binny._
-import binny.fs.{FsBinaryStore, FsStoreConfig, PathMapping}
+import binny.fs._
 import binny.jdbc.{GenericJdbcStore, JdbcStoreConfig}
 import binny.minio.{MinioBinaryStore, MinioConfig, S3KeyMapping}
 import scodec.bits.ByteVector
@@ -95,7 +95,6 @@ object BinnyUtils {
 
   def binaryStore[F[_]: Async](
       cfg: FileRepositoryConfig,
-      attrStore: AttributeStore[F],
       ds: DataSource,
       logger: Logger[F]
   ): BinaryStore[F] =
@@ -103,7 +102,7 @@ object BinnyUtils {
       case FileRepositoryConfig.Database(chunkSize) =>
         val jdbcConfig =
           JdbcStoreConfig("filechunk", chunkSize, BinnyUtils.TikaContentTypeDetect)
-        GenericJdbcStore[F](ds, LoggerAdapter(logger), jdbcConfig, attrStore)
+        GenericJdbcStore[F](ds, LoggerAdapter(logger), jdbcConfig)
 
       case FileRepositoryConfig.S3(endpoint, accessKey, secretKey, bucket, chunkSize) =>
         val keyMapping = S3KeyMapping.constant(bucket)
@@ -111,16 +110,16 @@
           .default(endpoint, accessKey, secretKey, keyMapping)
           .copy(chunkSize = chunkSize, detect = BinnyUtils.TikaContentTypeDetect)
 
-        MinioBinaryStore[F](minioCfg, attrStore, LoggerAdapter(logger))
+        MinioBinaryStore[F](minioCfg, LoggerAdapter(logger))
 
       case FileRepositoryConfig.Directory(path, chunkSize) =>
         val fsConfig = FsStoreConfig(
           path,
           BinnyUtils.TikaContentTypeDetect,
-          FsStoreConfig.OverwriteMode.Fail,
+          OverwriteMode.Fail,
           BinnyUtils.pathMapping,
           chunkSize
         )
-        FsBinaryStore[F](fsConfig, LoggerAdapter(logger), attrStore)
+        FsBinaryStore[F](fsConfig, LoggerAdapter(logger))
     }
 }
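With binny 0.6.0 none of the store constructors take an attribute store any more (GenericJdbcStore, MinioBinaryStore and FsBinaryStore above all lose that argument), and OverwriteMode now lives at the top level of binny.fs. A rough sketch of the directory-backed case, mirroring only the calls shown in this hunk; it assumes the code sits next to BinnyUtils so LoggerAdapter, TikaContentTypeDetect and pathMapping are in scope, and that path is an fs2 Path (both assumptions, not confirmed by the diff):

import binny.BinaryStore
import binny.fs.{FsBinaryStore, FsStoreConfig, OverwriteMode}
import cats.effect.Async
import fs2.io.file.Path

import docspell.logging.Logger

// Build a filesystem-backed BinaryStore without wiring an AttributeStore into it.
def directoryStore[F[_]: Async](path: Path, chunkSize: Int, logger: Logger[F]): BinaryStore[F] = {
  val fsConfig = FsStoreConfig(
    path,
    BinnyUtils.TikaContentTypeDetect,
    OverwriteMode.Fail, // was FsStoreConfig.OverwriteMode.Fail before 0.6.0
    BinnyUtils.pathMapping,
    chunkSize
  )
  FsBinaryStore[F](fsConfig, LoggerAdapter(logger))
}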
@@ -13,7 +13,7 @@ import fs2._
 
 import docspell.common._
 
-import binny.{BinaryAttributeStore, BinaryId, BinaryStore}
+import binny.{BinaryId, BinaryStore}
 import doobie.Transactor
 
 trait FileRepository[F[_]] {
@@ -45,17 +45,16 @@ object FileRepository {
       else AttributeStore.empty[F]
     val log = docspell.logging.getLogger[F]
     val keyFun: FileKey => BinaryId = BinnyUtils.fileKeyToBinaryId
-    val binStore: BinaryStore[F] = BinnyUtils.binaryStore(cfg, attrStore, ds, log)
-
+    val binStore: BinaryStore[F] = BinnyUtils.binaryStore(cfg, ds, log)
     new FileRepositoryImpl[F](cfg, binStore, attrStore, keyFun)
   }
 
   def getDelegate[F[_]](
       repo: FileRepository[F]
-  ): Option[(BinaryStore[F], BinaryAttributeStore[F])] =
+  ): Option[BinaryStore[F]] =
     repo match {
       case n: FileRepositoryImpl[F] =>
-        Some((n.bs, n.attrStore))
+        Some(n.bs)
 
       case _ =>
         None
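getDelegate now hands out only the underlying BinaryStore, since there is no separate binny attribute store to expose any more. A small sketch of a caller adapting to the new return type; deleteRaw and its arguments are hypothetical and it assumes the same package as FileRepository:

import binny.BinaryId
import cats.effect.IO

// Before: getDelegate returned Option[(BinaryStore[F], BinaryAttributeStore[F])].
// Now only the store comes back; attribute handling goes through AttributeStore.
def deleteRaw(repo: FileRepository[IO], id: BinaryId): IO[Unit] =
  FileRepository.getDelegate(repo) match {
    case Some(bs) => bs.delete(id)
    case None     => IO.unit
  }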
@@ -30,14 +30,14 @@ final class FileRepositoryImpl[F[_]: Sync](
 
   def findMeta(key: FileKey): F[Option[FileMetadata]] =
     attrStore
-      .findMeta(keyFun(key))
+      .findMeta(key)
       .map(rfm =>
         FileMetadata(rfm.id, rfm.created, rfm.mimetype, rfm.length, rfm.checksum)
       )
       .value
 
   def delete(key: FileKey): F[Unit] =
-    bs.delete(keyFun(key))
+    bs.delete(keyFun(key)) *> attrStore.deleteAttr(key).void
 
   def save(
       collective: Ident,
@@ -48,9 +48,15 @@ final class FileRepositoryImpl[F[_]: Sync](
     in =>
       Stream
        .eval(randomKey(collective, category))
-        .flatMap(fkey =>
-          in.through(bs.insertWith(keyFun(fkey), fhint)) ++ Stream.emit(fkey)
-        )
+        .flatMap(fkey => in.through(bs.insertWith(keyFun(fkey))) ++ Stream.emit(fkey))
+        .evalTap { key =>
+          val bid = keyFun(key)
+          bs.computeAttr(bid, fhint)
+            .run(AttributeName.all)
+            .semiflatMap(attr => attrStore.saveAttr(key, attr))
+            .value
+            .void
+        }
   }
 
   def randomKey(
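This hunk carries the core of the 0.6.0 adaptation: insertWith no longer takes a content-type hint, and the store no longer records attributes as a side effect of inserting; instead the attributes are computed afterwards with computeAttr and persisted through docspell's own AttributeStore. A condensed sketch of that post-insert step, assuming the same collaborators (bs, attrStore, keyFun) as the class above and that AttributeStore is in scope from the surrounding package:

import binny.{AttributeName, BinaryId, BinaryStore, Hint}
import cats.effect.Sync
import cats.syntax.all._

import docspell.common._

// Compute checksum, content type and length for a freshly inserted file
// and persist them as RFileMeta via the AttributeStore.
def recordAttributes[F[_]: Sync](
    bs: BinaryStore[F],
    attrStore: AttributeStore[F],
    keyFun: FileKey => BinaryId
)(key: FileKey, hint: Hint): F[Unit] =
  bs.computeAttr(keyFun(key), hint)
    .run(AttributeName.all)                            // ask for all attributes
    .semiflatMap(attrs => attrStore.saveAttr(key, attrs))
    .value                                             // OptionT -> F[Option[Unit]]
    .void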