Adapt backend to collective-id

This commit is contained in:
eikek
2022-08-04 11:03:27 +02:00
parent 26d7c91266
commit 53d92c4a26
94 changed files with 1468 additions and 833 deletions

View File

@ -123,8 +123,8 @@ trait MigrationTasks {
private def saveChannel(ch: Channel, account: AccountId): ConnectionIO[ChannelRef] =
(for {
newId <- OptionT.liftF(Ident.randomId[ConnectionIO])
userData <- OptionT(QLogin.findUser(account))
userId = userData.account.userId
userData <- OptionT(QLogin.findAccount(account))
userId = userData.userId
r <- RNotificationChannel.fromChannel(ch, newId, userId)
_ <- OptionT.liftF(RNotificationChannel.insert(r))
_ <- OptionT.liftF(
@ -174,8 +174,8 @@ trait MigrationTasks {
}
for {
userData <- OptionT(QLogin.findUser(old.account))
userId = userData.account.userId
userData <- OptionT(QLogin.findAccount(old.account))
userId = userData.userId
id <- OptionT.liftF(Ident.randomId[ConnectionIO])
now <- OptionT.liftF(Timestamp.current[ConnectionIO])
chName = Some("migrate notify items")

View File

@ -0,0 +1,35 @@
/*
* Copyright 2020 Eike K. & Contributors
*
* SPDX-License-Identifier: AGPL-3.0-or-later
*/
package db.migration.data
import docspell.common._
import io.circe.generic.semiauto._
import io.circe.{Decoder, Encoder}
/** Arguments for the `AllPreviewsTask`, which submits jobs that generate a preview image
  * per attachment.
  *
  * Depending on `storeMode` an existing preview is replaced or only created when missing.
  * When `collective` is `None`, attachments of all collectives are considered.
  *
  * @deprecated
  *   This structure has been replaced to use a `CollectiveId`
  */
case class AllPreviewsArgs(
    collective: Option[Ident],
    storeMode: MakePreviewArgs.StoreMode
)

object AllPreviewsArgs {
  // task identifier used when submitting/looking up jobs
  val taskName = Ident.unsafe("all-previews")

  implicit val jsonDecoder: Decoder[AllPreviewsArgs] =
    deriveDecoder
  implicit val jsonEncoder: Encoder[AllPreviewsArgs] =
    deriveEncoder
}

View File

@ -0,0 +1,34 @@
/*
* Copyright 2020 Eike K. & Contributors
*
* SPDX-License-Identifier: AGPL-3.0-or-later
*/
package db.migration.data
import docspell.common._
import io.circe._
import io.circe.generic.semiauto._
/** Arguments for the task that looks up all pdf files that were never converted and
  * submits one job per file to convert it via ocrmypdf.
  *
  * When `collective` is given, this task and the jobs it submits are restricted to that
  * collective's files. When it is `None`, it acts as a system task over all files.
  *
  * @deprecated
  *   replaced with same using `CollectiveId`
  */
case class ConvertAllPdfArgs(collective: Option[Ident])

object ConvertAllPdfArgs {
  // task identifier used when submitting/looking up jobs
  val taskName = Ident.unsafe("submit-pdf-migration-tasks")

  implicit val jsonEncoder: Encoder[ConvertAllPdfArgs] =
    deriveEncoder
  implicit val jsonDecoder: Decoder[ConvertAllPdfArgs] =
    deriveDecoder
}

View File

@ -0,0 +1,52 @@
/*
* Copyright 2020 Eike K. & Contributors
*
* SPDX-License-Identifier: AGPL-3.0-or-later
*/
package db.migration.data
import docspell.common._
import docspell.common.syntax.all._
import com.github.eikek.calev.CalEvent
import io.circe._
import io.circe.generic.semiauto._
/** Arguments to the empty-trash task.
  *
  * Runs periodically and permanently removes all soft-deleted items, i.e. items in state
  * `ItemState.Deleted`.
  *
  * @deprecated
  *   This structure has been changed to use a `CollectiveId`
  */
case class EmptyTrashArgs(
    collective: Ident,
    minAge: Duration
) {
  /** Human readable subject for the submitted job. */
  def makeSubject: String =
    s"Empty Trash: Remove older than ${minAge.toJava}"

  /** The periodic-task id for this collective, see the companion. */
  def periodicTaskId: Ident =
    EmptyTrashArgs.periodicTaskId(collective)
}

object EmptyTrashArgs {
  // task identifier used when submitting/looking up jobs
  val taskName = Ident.unsafe("empty-trash")

  // run weekly by default
  val defaultSchedule = CalEvent.unsafe("*-*-1/7 03:00:00 UTC")

  /** Deterministic id for the periodic task of a given collective. */
  def periodicTaskId(coll: Ident): Ident =
    Ident.unsafe("docspell") / taskName / coll

  implicit val jsonDecoder: Decoder[EmptyTrashArgs] =
    deriveDecoder
  implicit val jsonEncoder: Encoder[EmptyTrashArgs] =
    deriveEncoder

  /** Decode args from their JSON representation. */
  def parse(str: String): Either[Throwable, EmptyTrashArgs] =
    str.parseJsonAs[EmptyTrashArgs]
}

View File

@ -0,0 +1,32 @@
/*
* Copyright 2020 Eike K. & Contributors
*
* SPDX-License-Identifier: AGPL-3.0-or-later
*/
package db.migration.data
import docspell.common._
import io.circe.generic.semiauto.{deriveDecoder, deriveEncoder}
import io.circe.{Decoder, Encoder}
/** Arguments to submit a task that runs addons configured for some existing item.
  *
  * When `addonRunConfigs` is non-empty, only those addon tasks are run; otherwise every
  * addon task configured for 'existing-item' is run.
  *
  * @deprecated
  *   replaced with version using a `CollectiveId`
  */
final case class ItemAddonTaskArgs(
    collective: Ident,
    itemId: Ident,
    addonRunConfigs: Set[Ident]
)

object ItemAddonTaskArgs {
  // task identifier used when submitting/looking up jobs
  val taskName: Ident = Ident.unsafe("addon-existing-item")

  implicit val jsonEncoder: Encoder[ItemAddonTaskArgs] = deriveEncoder[ItemAddonTaskArgs]
  implicit val jsonDecoder: Decoder[ItemAddonTaskArgs] = deriveDecoder[ItemAddonTaskArgs]
}

View File

@ -0,0 +1,45 @@
/*
* Copyright 2020 Eike K. & Contributors
*
* SPDX-License-Identifier: AGPL-3.0-or-later
*/
package db.migration.data
import docspell.common._
import docspell.common.syntax.all._
import io.circe._
import io.circe.generic.semiauto._
/** Arguments to the classify-item task.
  *
  * Runs periodically and trains a model from the collective's existing documents that is
  * used to predict tags for new documents. The user must provide a tag category as a
  * subset of possible tags.
  *
  * @deprecated
  *   This structure has been replaced to use a `CollectiveId`
  */
case class LearnClassifierArgs(
    collective: Ident
) {
  /** Human readable subject for the submitted job. */
  def makeSubject: String =
    "Learn tags"
}

object LearnClassifierArgs {
  // task identifier used when submitting/looking up jobs
  val taskName = Ident.unsafe("learn-classifier")

  implicit val jsonDecoder: Decoder[LearnClassifierArgs] =
    deriveDecoder
  implicit val jsonEncoder: Encoder[LearnClassifierArgs] =
    deriveEncoder

  /** Decode args from their JSON representation. */
  def parse(str: String): Either[Throwable, LearnClassifierArgs] =
    str.parseJsonAs[LearnClassifierArgs]
}

View File

@ -0,0 +1,82 @@
/*
* Copyright 2020 Eike K. & Contributors
*
* SPDX-License-Identifier: AGPL-3.0-or-later
*/
package db.migration.data
import ProcessItemArgs._
import docspell.common._
import docspell.common.syntax.all._
import io.circe._
import io.circe.generic.semiauto._
/** Arguments to the process-item task.
  *
  * Run for each new file, either creating a fresh item from it or attaching the file to
  * an existing item.
  *
  * When `itemId` in the meta data is defined, that item is loaded and amended with the
  * given files; otherwise a new item is created. The 'ReProcessItem' task re-uses this
  * structure as well.
  *
  * @deprecated
  *   This is an old structure where the collective id was an `Ident` which is now the
  *   collective name. It is used to migrate database records to the new structure (same
  *   name in commons package)
  */
case class ProcessItemArgs(meta: ProcessMeta, files: List[File]) {

  /** Human readable subject derived from the (named) files. */
  def makeSubject: String = {
    val names = files.flatMap(_.name)
    names match {
      case Nil                    => s"${meta.sourceAbbrev}: No files supplied"
      case single :: Nil          => single
      case first :: second :: Nil => s"$first, $second"
      case _                      => s"${files.size} files from ${meta.sourceAbbrev}"
    }
  }

  /** True when this is a first-time processing run, not a re-process. */
  def isNormalProcessing: Boolean =
    !meta.reprocess
}

object ProcessItemArgs {
  // task identifiers used when submitting/looking up jobs
  val taskName = Ident.unsafe("process-item")
  val multiUploadTaskName = Ident.unsafe("multi-upload-process")

  /** Meta data accompanying the uploaded files. */
  case class ProcessMeta(
      collective: Ident,
      itemId: Option[Ident],
      language: Language,
      direction: Option[Direction],
      sourceAbbrev: String,
      folderId: Option[Ident],
      validFileTypes: Seq[MimeType],
      skipDuplicate: Boolean,
      fileFilter: Option[Glob],
      tags: Option[List[String]],
      reprocess: Boolean,
      attachmentsOnly: Option[Boolean]
  )

  object ProcessMeta {
    implicit val jsonDecoder: Decoder[ProcessMeta] = deriveDecoder
    implicit val jsonEncoder: Encoder[ProcessMeta] = deriveEncoder
  }

  /** A single uploaded file: optional display name plus its storage key. */
  case class File(name: Option[String], fileMetaId: FileKey)

  object File {
    implicit val jsonDecoder: Decoder[File] = deriveDecoder
    implicit val jsonEncoder: Encoder[File] = deriveEncoder
  }

  implicit val jsonDecoder: Decoder[ProcessItemArgs] = deriveDecoder
  implicit val jsonEncoder: Encoder[ProcessItemArgs] = deriveEncoder

  /** Decode args from their JSON representation. */
  def parse(str: String): Either[Throwable, ProcessItemArgs] =
    str.parseJsonAs[ProcessItemArgs]
}

View File

@ -0,0 +1,34 @@
/*
* Copyright 2020 Eike K. & Contributors
*
* SPDX-License-Identifier: AGPL-3.0-or-later
*/
package db.migration.data
import docspell.common._
import io.circe._
import io.circe.generic.semiauto._
/** Arguments for the full-text re-index task, optionally scoped to one collective.
  *
  * @deprecated
  *   This structure has been replaced with one using a `CollectiveId`
  */
final case class ReIndexTaskArgs(collective: Option[Ident])

object ReIndexTaskArgs {
  // task identifier used when submitting/looking up jobs
  val taskName = Ident.unsafe("full-text-reindex")

  /** Tracker id for the task: prefixed with the collective when one is given. */
  def tracker(args: ReIndexTaskArgs): Ident =
    args.collective.fold(DocspellSystem.migrationTaskTracker)(
      _ / DocspellSystem.migrationTaskTracker
    )

  implicit val jsonDecoder: Decoder[ReIndexTaskArgs] =
    deriveDecoder
  implicit val jsonEncoder: Encoder[ReIndexTaskArgs] =
    deriveEncoder
}

View File

@ -0,0 +1,26 @@
/*
* Copyright 2020 Eike K. & Contributors
*
* SPDX-License-Identifier: AGPL-3.0-or-later
*/
package db.migration.data
import docspell.common._
import io.circe.generic.semiauto.{deriveDecoder, deriveEncoder}
import io.circe.{Decoder, Encoder}
/** Arguments for running a scheduled addon task of a collective.
  *
  * @deprecated
  *   This has been replaced with a version using a `CollectiveId`
  *
  * @param collective
  * @param addonTaskId
  */
final case class ScheduledAddonTaskArgs(collective: Ident, addonTaskId: Ident)

object ScheduledAddonTaskArgs {
  // task identifier used when submitting/looking up jobs
  val taskName: Ident = Ident.unsafe("addon-scheduled-task")

  implicit val jsonEncoder: Encoder[ScheduledAddonTaskArgs] =
    deriveEncoder[ScheduledAddonTaskArgs]
  implicit val jsonDecoder: Decoder[ScheduledAddonTaskArgs] =
    deriveDecoder[ScheduledAddonTaskArgs]
}

View File

@ -24,7 +24,7 @@ import scodec.bits.ByteVector
object BinnyUtils {
def fileKeyToBinaryId(fk: FileKey): BinaryId =
BinaryId(s"${fk.collective.id}/${fk.category.id.id}/${fk.id.id}")
BinaryId(s"${fk.collective.valueAsString}/${fk.category.id.id}/${fk.id.id}")
def fileKeyPartToPrefix(fkp: FileKeyPart): Option[String] =
fkp match {
@ -38,7 +38,7 @@ object BinnyUtils {
bid.id.split('/').toList match {
case cId :: catId :: fId :: Nil =>
for {
coll <- Ident.fromString(cId)
coll <- CollectiveId.fromString(cId)
cat <- FileCategory.fromString(catId)
file <- Ident.fromString(fId)
} yield FileKey(coll, cat, file)
@ -78,7 +78,7 @@ object BinnyUtils {
def toPath(base: Path, binaryId: BinaryId): Path = {
val fkey = unsafeBinaryIdToFileKey(binaryId)
base / fkey.collective.id / fkey.category.id.id / fkey.id.id / "file"
base / s"${fkey.collective.value}" / fkey.category.id.id / fkey.id.id / "file"
}
def toId(file: Path): Option[BinaryId] =
@ -87,7 +87,11 @@ object BinnyUtils {
cat <- id.parent
fcat <- FileCategory.fromString(cat.asId.id).toOption
coll <- cat.parent
fkey = FileKey(Ident.unsafe(coll.asId.id), fcat, Ident.unsafe(id.asId.id))
fkey = FileKey(
CollectiveId.unsafeFromString(coll.asId.id),
fcat,
Ident.unsafe(id.asId.id)
)
} yield fileKeyToBinaryId(fkey)
PathMapping(toPath)(toId)

View File

@ -26,7 +26,7 @@ trait FileRepository[F[_]] {
def delete(key: FileKey): F[Unit]
def save(
collective: Ident,
collective: CollectiveId,
category: FileCategory,
hint: MimeTypeHint
): Pipe[F, Byte, FileKey]

View File

@ -40,7 +40,7 @@ final class FileRepositoryImpl[F[_]: Sync](
bs.delete(keyFun(key)) *> attrStore.deleteAttr(key).void
def save(
collective: Ident,
collective: CollectiveId,
category: FileCategory,
hint: MimeTypeHint
): Pipe[F, Byte, FileKey] = {
@ -60,7 +60,7 @@ final class FileRepositoryImpl[F[_]: Sync](
}
def randomKey(
collective: Ident,
collective: CollectiveId,
category: FileCategory
): F[FileKey] =
BinaryId.random[F].map(bid => FileKey(collective, category, Ident.unsafe(bid.id)))

View File

@ -24,7 +24,7 @@ object FileUrlReader {
scheme = Nel.of(scheme),
authority = Some(""),
path = LenientUri.NonEmptyPath(
Nel.of(key.collective.id, key.category.id.id, key.id.id),
Nel.of(key.collective.valueAsString, key.category.id.id, key.id.id),
false
),
query = None,

View File

@ -666,7 +666,7 @@ object QItem extends FtsSupport {
final case class NameAndNotes(
id: Ident,
collective: Ident,
collective: CollectiveId,
folder: Option[Ident],
name: String,
notes: Option[String],

View File

@ -58,6 +58,18 @@ object QLogin {
def findUser(userId: Ident): ConnectionIO[Option[Data]] =
findUser0((user, _) => user.uid === userId)
/** Finds the account given a combination of login/user-id and coll-id/coll-name pair.
*/
def findAccount(acc: AccountId): ConnectionIO[Option[AccountInfo]] = {
val collIdOpt = acc.collective.id.toLongOption.map(CollectiveId(_))
findUser0((ut, ct) =>
(ut.login === acc.user || ut.uid === acc.user) && collIdOpt
.map(id => ct.id === id)
.getOrElse(ct.name === acc.collective)
)
.map(_.map(_.account))
}
def findByRememberMe(
rememberId: Ident,
minCreated: Timestamp

View File

@ -37,6 +37,7 @@ object RCollective {
val created = Column[Timestamp]("created", this)
val all = NonEmptyList.of[Column[_]](id, name, state, language, integration, created)
val allNoId = NonEmptyList.fromListUnsafe(all.tail)
}
def makeDefault(collName: Ident, created: Timestamp): RCollective =
@ -53,12 +54,17 @@ object RCollective {
def as(alias: String): Table =
Table(Some(alias))
def insert(value: RCollective): ConnectionIO[Int] =
DML.insert(
T,
T.all,
fr"${value.id},${value.name},${value.state},${value.language},${value.integrationEnabled},${value.created}"
)
def insert(value: RCollective): ConnectionIO[CollectiveId] =
DML
.insertFragment(
T,
T.allNoId,
List(
fr"${value.name},${value.state},${value.language},${value.integrationEnabled},${value.created}"
)
)
.update
.withUniqueGeneratedKeys[CollectiveId](T.id.name)
def update(value: RCollective): ConnectionIO[Int] =
DML.update(
@ -139,6 +145,11 @@ object RCollective {
sql.query[RCollective].option
}
def findByName(cname: Ident): ConnectionIO[Option[RCollective]] = {
val sql = run(select(T.all), from(T), T.name === cname)
sql.query[RCollective].option
}
def findByItem(itemId: Ident): ConnectionIO[Option[RCollective]] = {
val i = RItem.as("i")
val c = RCollective.as("c")
@ -154,6 +165,11 @@ object RCollective {
sql.query[Int].unique.map(_ > 0)
}
def existsByName(name: Ident): ConnectionIO[Boolean] = {
val sql = Select(count(T.id).s, from(T), T.name === name).build
sql.query[Int].unique.map(_ > 0)
}
def findAll(order: Table => Column[_]): ConnectionIO[Vector[RCollective]] = {
val sql = Select(select(T.all), from(T)).orderBy(order(T))
sql.build.query[RCollective].to[Vector]

View File

@ -123,8 +123,8 @@ object RItem {
fr"${v.created},${v.updated},${v.notes},${v.folderId}"
)
def getCollective(itemId: Ident): ConnectionIO[Option[Ident]] =
Select(T.cid.s, from(T), T.id === itemId).build.query[Ident].option
def getCollective(itemId: Ident): ConnectionIO[Option[CollectiveId]] =
Select(T.cid.s, from(T), T.id === itemId).build.query[CollectiveId].option
def updateAll(item: RItem): ConnectionIO[Int] =
for {

View File

@ -60,7 +60,7 @@ object RSentMail {
def forItem(
itemId: Ident,
accId: AccountId,
userId: Ident,
messageId: String,
sender: MailAddress,
connName: Ident,
@ -69,10 +69,9 @@ object RSentMail {
body: String
): OptionT[ConnectionIO, (RSentMail, RSentMailItem)] =
for {
user <- OptionT(RUser.findByAccount(accId))
sm <- OptionT.liftF(
RSentMail[ConnectionIO](
user.uid,
userId,
messageId,
sender,
connName,

View File

@ -94,17 +94,21 @@ object RShare {
else Nil)
)
def findOne(id: Ident, cid: CollectiveId): OptionT[ConnectionIO, (RShare, RUser)] = {
def findOne(
id: Ident,
cid: CollectiveId
): OptionT[ConnectionIO, (RShare, AccountInfo)] = {
val s = RShare.as("s")
val u = RUser.as("u")
val c = RCollective.as("c")
OptionT(
Select(
select(s.all, u.all),
from(s).innerJoin(u, u.uid === s.userId),
select(s.all, NonEmptyList.of(c.id, c.name, u.uid, u.login)),
from(s).innerJoin(u, u.uid === s.userId).innerJoin(c, c.id === u.cid),
s.id === id && u.cid === cid
).build
.query[(RShare, RUser)]
.query[(RShare, AccountInfo)]
.option
)
}
@ -115,20 +119,21 @@ object RShare {
def findActive(
id: Ident,
current: Timestamp
): OptionT[ConnectionIO, (RShare, RUser)] = {
): OptionT[ConnectionIO, (RShare, AccountInfo)] = {
val s = RShare.as("s")
val u = RUser.as("u")
val c = RCollective.as("c")
OptionT(
Select(
select(s.all, u.all),
from(s).innerJoin(u, s.userId === u.uid),
select(s.all, NonEmptyList.of(c.id, c.name, u.uid, u.login)),
from(s).innerJoin(u, s.userId === u.uid).innerJoin(c, c.id === u.cid),
activeCondition(s, id, current)
).build.query[(RShare, RUser)].option
).build.query[(RShare, AccountInfo)].option
)
}
def findCurrentActive(id: Ident): OptionT[ConnectionIO, (RShare, RUser)] =
def findCurrentActive(id: Ident): OptionT[ConnectionIO, (RShare, AccountInfo)] =
OptionT.liftF(Timestamp.current[ConnectionIO]).flatMap(now => findActive(id, now))
def findActivePassword(id: Ident): OptionT[ConnectionIO, Option[Password]] =
@ -159,21 +164,22 @@ object RShare {
cid: CollectiveId,
ownerLogin: Option[Ident],
q: Option[String]
): ConnectionIO[List[(RShare, RUser)]] = {
): ConnectionIO[List[(RShare, AccountInfo)]] = {
val s = RShare.as("s")
val u = RUser.as("u")
val c = RCollective.as("c")
val ownerQ = ownerLogin.map(name => u.login === name)
val nameQ = q.map(n => s.name.like(s"%$n%"))
Select(
select(s.all, u.all),
from(s).innerJoin(u, u.uid === s.userId),
select(s.all, NonEmptyList.of(c.id, c.name, u.uid, u.login)),
from(s).innerJoin(u, u.uid === s.userId).innerJoin(c, c.id === u.cid),
u.cid === cid &&? ownerQ &&? nameQ
)
.orderBy(s.publishedAt.desc)
.build
.query[(RShare, RUser)]
.query[(RShare, AccountInfo)]
.to[List]
}

View File

@ -125,8 +125,8 @@ object RSource {
private[records] def findEnabledSql(id: Ident): Fragment =
run(select(table.all), from(table), where(table.sid === id, table.enabled === true))
def findCollective(sourceId: Ident): ConnectionIO[Option[Ident]] =
run(select(table.cid), from(table), table.sid === sourceId).query[Ident].option
def findCollectiveId(sourceId: Ident): ConnectionIO[Option[CollectiveId]] =
run(select(table.cid), from(table), table.sid === sourceId).query[CollectiveId].option
def findAll(
coll: CollectiveId,

View File

@ -129,9 +129,20 @@ object RUser {
sql.query[RUser].option
}
def findById(userId: Ident): ConnectionIO[Option[RUser]] = {
def findById(userId: Ident, cid: Option[CollectiveId]): ConnectionIO[Option[RUser]] = {
val t = Table(None)
val sql = run(select(t.all), from(t), t.uid === userId)
val sql =
run(select(t.all), from(t), t.uid === userId &&? cid.map(id => t.cid === id))
sql.query[RUser].option
}
def findByLogin(
login: Ident,
cid: Option[CollectiveId]
): ConnectionIO[Option[RUser]] = {
val t = Table(None)
val sql =
run(select(t.all), from(t), t.login === login &&? cid.map(id => t.cid === id))
sql.query[RUser].option
}

View File

@ -77,8 +77,8 @@ object RUserEmail {
now
)
def fromAccount(
accId: AccountId,
def fromUser(
userId: Ident,
name: Ident,
smtpHost: String,
smtpPort: Option[Int],
@ -92,10 +92,9 @@ object RUserEmail {
for {
now <- OptionT.liftF(Timestamp.current[ConnectionIO])
id <- OptionT.liftF(Ident.randomId[ConnectionIO])
user <- OptionT(RUser.findByAccount(accId))
} yield RUserEmail(
id,
user.uid,
userId,
name,
smtpHost,
smtpPort,

View File

@ -75,8 +75,8 @@ object RUserImap {
now
)
def fromAccount(
accId: AccountId,
def fromUser(
userId: Ident,
name: Ident,
imapHost: String,
imapPort: Option[Int],
@ -89,10 +89,9 @@ object RUserImap {
for {
now <- OptionT.liftF(Timestamp.current[ConnectionIO])
id <- OptionT.liftF(Ident.randomId[ConnectionIO])
user <- OptionT(RUser.findByAccount(accId))
} yield RUserImap(
id,
user.uid,
userId,
name,
imapHost,
imapPort,

View File

@ -99,8 +99,8 @@ class TempFtsOpsTest extends DatabaseTest {
for {
today <- IO(LocalDate.now())
account <- store
.transact(QLogin.findUser(DocspellSystem.account))
.map(_.get.account)
.transact(QLogin.findAccount(DocspellSystem.account))
.map(_.get)
tempTable = ftsResults
.through(TempFtsOps.prepareTable(store.dbms, "fts_result"))
.compile