Experiment with addons
Addons allow executing external programs in certain contexts inside docspell. Currently they can be run after files have been processed. Addons are provided as URLs pointing to zip files.
@@ -0,0 +1,47 @@
create table "addon_archive"(
  "id" varchar(254) not null primary key,
  "cid" varchar(254) not null,
  "file_id" varchar(254) not null,
  "original_url" varchar(2000),
  "name" varchar(254) not null,
  "version" varchar(254) not null,
  "description" text,
  "triggers" text not null,
  "created" timestamp not null,
  foreign key ("cid") references "collective"("cid"),
  foreign key ("file_id") references "filemeta"("file_id"),
  unique ("cid", "original_url"),
  unique ("cid", "name", "version")
);

create table "addon_run_config"(
  "id" varchar(254) not null primary key,
  "cid" varchar(254) not null,
  "user_id" varchar(254),
  "name" varchar(254) not null,
  "enabled" boolean not null,
  "created" timestamp not null,
  foreign key ("cid") references "collective"("cid"),
  foreign key ("user_id") references "user_"("uid")
);

create table "addon_run_config_addon" (
  "id" varchar(254) not null primary key,
  "addon_run_config_id" varchar(254) not null,
  "addon_id" varchar(254) not null,
  "args" text not null,
  "position" int not null,
  foreign key ("addon_run_config_id") references "addon_run_config"("id") on delete cascade,
  foreign key ("addon_id") references "addon_archive"("id") on delete cascade
);

create table "addon_run_config_trigger"(
  "id" varchar(254) not null primary key,
  "addon_run_config_id" varchar(254) not null,
  "triggers" varchar(254) not null,
  foreign key ("addon_run_config_id") references "addon_run_config"("id") on delete cascade,
  unique ("addon_run_config_id", "triggers")
);

alter table "node"
  add column "server_secret" varchar;
@@ -0,0 +1,47 @@
create table `addon_archive`(
  `id` varchar(254) not null primary key,
  `cid` varchar(254) not null,
  `file_id` varchar(254) not null,
  `original_url` varchar(2000),
  `name` varchar(254) not null,
  `version` varchar(254) not null,
  `description` text,
  `triggers` text not null,
  `created` timestamp not null,
  foreign key (`cid`) references `collective`(`cid`),
  foreign key (`file_id`) references `filemeta`(`file_id`),
  unique (`cid`, `original_url`),
  unique (`cid`, `name`, `version`)
);

create table `addon_run_config`(
  `id` varchar(254) not null primary key,
  `cid` varchar(254) not null,
  `user_id` varchar(254),
  `name` varchar(254) not null,
  `enabled` boolean not null,
  `created` timestamp not null,
  foreign key (`cid`) references `collective`(`cid`),
  foreign key (`user_id`) references `user_`(`uid`)
);

create table `addon_run_config_addon` (
  `id` varchar(254) not null primary key,
  `addon_run_config_id` varchar(254) not null,
  `addon_id` varchar(254) not null,
  `args` text not null,
  `position` int not null,
  foreign key (`addon_run_config_id`) references `addon_run_config`(`id`) on delete cascade,
  foreign key (`addon_id`) references `addon_archive`(`id`) on delete cascade
);

create table `addon_run_config_trigger`(
  `id` varchar(254) not null primary key,
  `addon_run_config_id` varchar(254) not null,
  `triggers` varchar(254) not null,
  foreign key (`addon_run_config_id`) references `addon_run_config`(`id`) on delete cascade,
  unique (`addon_run_config_id`, `triggers`)
);

alter table `node`
  add column (`server_secret` varchar(2000));
@@ -0,0 +1,47 @@
create table "addon_archive"(
  "id" varchar(254) not null primary key,
  "cid" varchar(254) not null,
  "file_id" varchar(254) not null,
  "original_url" varchar(2000),
  "name" varchar(254) not null,
  "version" varchar(254) not null,
  "description" text,
  "triggers" text not null,
  "created" timestamp not null,
  foreign key ("cid") references "collective"("cid"),
  foreign key ("file_id") references "filemeta"("file_id"),
  unique ("cid", "original_url"),
  unique ("cid", "name", "version")
);

create table "addon_run_config"(
  "id" varchar(254) not null primary key,
  "cid" varchar(254) not null,
  "user_id" varchar(254),
  "name" varchar(254) not null,
  "enabled" boolean not null,
  "created" timestamp not null,
  foreign key ("cid") references "collective"("cid"),
  foreign key ("user_id") references "user_"("uid")
);

create table "addon_run_config_addon" (
  "id" varchar(254) not null primary key,
  "addon_run_config_id" varchar(254) not null,
  "addon_id" varchar(254) not null,
  "args" text not null,
  "position" int not null,
  foreign key ("addon_run_config_id") references "addon_run_config"("id") on delete cascade,
  foreign key ("addon_id") references "addon_archive"("id") on delete cascade
);

create table "addon_run_config_trigger"(
  "id" varchar(254) not null primary key,
  "addon_run_config_id" varchar(254) not null,
  "triggers" varchar(254) not null,
  foreign key ("addon_run_config_id") references "addon_run_config"("id") on delete cascade,
  unique ("addon_run_config_id", "triggers")
);

alter table "node"
  add column "server_secret" varchar;
@@ -0,0 +1,64 @@
/*
 * Copyright 2020 Eike K. & Contributors
 *
 * SPDX-License-Identifier: AGPL-3.0-or-later
 */

package docspell.store.file

import cats.data.{NonEmptyList => Nel}
import cats.effect.Sync
import cats.syntax.all._
import fs2.Stream

import docspell.common.{FileKey, LenientUri, UrlReader}

import binny.BinaryId

object FileUrlReader {

  private val scheme: String = "docspell-file"

  def url(key: FileKey): LenientUri =
    LenientUri(
      scheme = Nel.of(scheme),
      authority = Some(""),
      path = LenientUri.NonEmptyPath(
        Nel.of(key.collective.id, key.category.id.id, key.id.id)
      ),
      query = None,
      fragment = None
    )

  def apply[F[_]: Sync](repo: FileRepository[F]): UrlReader[F] =
    UrlReader.instance { url =>
      url.scheme.head match {
        case `scheme` =>
          Stream
            .emit(urlToFileKey(url))
            .covary[F]
            .rethrow
            .evalMap(key => repo.findMeta(key).map(m => (key, m)))
            .flatMap {
              case _ -> Some(m) => repo.getBytes(m.id)
              case key -> None =>
                Stream.raiseError(
                  new NoSuchElementException(
                    s"File not found for url '${url.asString}' (key=$key)"
                  )
                )
            }

        case _ =>
          UrlReader.defaultReader[F].apply(url)
      }
    }

  private[file] def urlToFileKey(url: LenientUri): Either[Throwable, FileKey] =
    BinnyUtils
      .binaryIdToFileKey(BinaryId(url.host match {
        case Some(h) if h.nonEmpty => s"$h${url.path.asString}"
        case _ => url.path.segments.mkString("/")
      }))
      .leftMap(new IllegalArgumentException(_))
}
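Not part of the commit: a rough usage sketch of the new url scheme. It assumes UrlReader[F] is essentially a function LenientUri => Stream[F, Byte], as the instance above suggests, and that FileRepository lives in the same package; the helper names are illustrative.

import cats.effect.IO
import fs2.Stream
import docspell.common.FileKey
import docspell.store.file.{FileRepository, FileUrlReader}

// turn a stored file (e.g. an uploaded addon zip) into a plain URL that can be
// passed around, something like docspell-file://<collective>/<category>/<id>
def addonFileUrl(key: FileKey) =
  FileUrlReader.url(key)

// later, resolve such a URL back to the stored bytes via the file repository
def readAddonFile(repo: FileRepository[IO], key: FileKey): Stream[IO, Byte] =
  FileUrlReader(repo).apply(addonFileUrl(key))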
@@ -9,6 +9,7 @@ package docspell.store.impl
import java.time.format.DateTimeFormatter
import java.time.{Instant, LocalDate}

+import docspell.addons.AddonTriggerType
import docspell.common._
import docspell.common.syntax.all._
import docspell.jsonminiq.JsonMiniQuery
@@ -31,9 +32,9 @@ trait DoobieMeta extends EmilDoobieMeta {

  implicit val sqlLogging: LogHandler = LogHandler {
    case e @ Success(_, _, _, _) =>
-      DoobieMeta.logger.trace("SQL " + e)
+      DoobieMeta.logger.trace(s"SQL: $e")
    case e =>
-      DoobieMeta.logger.error(s"SQL Failure: $e")
+      DoobieMeta.logger.warn(s"SQL Failure: $e")
  }

  def jsonMeta[A](implicit d: Decoder[A], e: Encoder[A]): Meta[A] =
@@ -41,6 +42,12 @@ trait DoobieMeta extends EmilDoobieMeta {
      e.apply(a).noSpaces
    )

+  implicit val metaAddonTriggerType: Meta[AddonTriggerType] =
+    Meta[String].timap(AddonTriggerType.unsafeFromString)(_.name)
+
+  implicit val metaAddonTriggerTypeSet: Meta[Set[AddonTriggerType]] =
+    jsonMeta[Set[AddonTriggerType]]
+
  implicit val metaBinaryId: Meta[BinaryId] =
    Meta[String].timap(BinaryId.apply)(_.id)
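Not part of the commit: a minimal sketch of what the new Meta instances enable. Assuming metaAddonTriggerTypeSet is in implicit scope (by mixing in or importing these DoobieMeta instances), the JSON-encoded triggers column can be read directly as a Set[AddonTriggerType]; the query itself is illustrative.

import doobie._
import doobie.implicits._
import docspell.addons.AddonTriggerType

// read the trigger set of one addon archive; decoding the JSON text column
// is handled by the Meta[Set[AddonTriggerType]] instance above
def triggersOf(addonId: String): ConnectionIO[Option[Set[AddonTriggerType]]] =
  sql"select triggers from addon_archive where id = $addonId"
    .query[Set[AddonTriggerType]]
    .option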
@@ -0,0 +1,31 @@
/*
 * Copyright 2020 Eike K. & Contributors
 *
 * SPDX-License-Identifier: AGPL-3.0-or-later
 */

package docspell.store.queries

import docspell.common.BaseJsonCodecs._
import docspell.common._

import io.circe.generic.semiauto.{deriveDecoder, deriveEncoder}
import io.circe.{Decoder, Encoder}
import scodec.bits.ByteVector

/** Information about an attachment file (can be attachment-source or attachment) */
case class AttachedFile(
    id: Ident,
    name: Option[String],
    position: Int,
    language: Option[Language],
    mimetype: MimeType,
    length: ByteSize,
    checksum: ByteVector
)

object AttachedFile {

  implicit val jsonDecoder: Decoder[AttachedFile] = deriveDecoder
  implicit val jsonEncoder: Encoder[AttachedFile] = deriveEncoder
}
@@ -22,10 +22,37 @@ import doobie._

object QAttachment {
  private val a = RAttachment.as("a")
+  private val as = RAttachmentSource.as("ats")
  private val item = RItem.as("i")
  private val am = RAttachmentMeta.as("am")
  private val c = RCollective.as("c")
  private val im = RItemProposal.as("im")
+  private val fm = RFileMeta.as("fm")

+  def attachmentSourceFile(itemId: Ident): ConnectionIO[List[AttachedFile]] =
+    Select(
+      combineNel(
+        select(as.id, as.name, a.position, am.language),
+        select(fm.mimetype, fm.length, fm.checksum)
+      ),
+      from(a)
+        .innerJoin(as, a.id === as.id)
+        .innerJoin(fm, fm.id === as.fileId)
+        .leftJoin(am, am.id === a.id),
+      a.itemId === itemId
+    ).orderBy(a.position).build.query[AttachedFile].to[List]
+
+  def attachmentFile(itemId: Ident): ConnectionIO[List[AttachedFile]] =
+    Select(
+      combineNel(
+        select(a.id, a.name, a.position, am.language),
+        select(fm.mimetype, fm.length, fm.checksum)
+      ),
+      from(a)
+        .innerJoin(fm, fm.id === a.fileId)
+        .leftJoin(am, am.id === a.id),
+      a.itemId === itemId
+    ).orderBy(a.position).build.query[AttachedFile].to[List]
+
  def deletePreview[F[_]: Sync](store: Store[F])(attachId: Ident): F[Int] = {
    val findPreview
@@ -163,6 +190,17 @@ object QAttachment {
    q.query[RAttachmentMeta].option
  }

+  def getAttachmentMetaOfItem(itemId: Ident): ConnectionIO[Vector[RAttachmentMeta]] =
+    Select(
+      select(am.all),
+      from(am)
+        .innerJoin(a, a.id === am.id),
+      a.itemId === itemId
+    ).orderBy(a.position.asc)
+      .build
+      .query[RAttachmentMeta]
+      .to[Vector]
+
  case class ContentAndName(
    id: Ident,
    item: Ident,
@@ -175,6 +213,7 @@ object QAttachment {
  def allAttachmentMetaAndName(
      coll: Option[Ident],
+      itemIds: Option[Nel[Ident]],
      itemStates: Nel[ItemState],
      chunkSize: Int
  ): Stream[ConnectionIO, ContentAndName] =
    Select(
@@ -192,7 +231,7 @@ object QAttachment {
        .innerJoin(item, item.id === a.itemId)
        .innerJoin(c, c.id === item.cid)
    ).where(
-      item.state.in(ItemState.validStates) &&?
+      item.state.in(itemStates) &&?
+      itemIds.map(ids => item.id.in(ids)) &&?
      coll.map(cid => item.cid === cid)
    ).build
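Not part of the commit: a rough sketch of how the new queries and AttachedFile's derived JSON codec could be used together. The Store[F].transact call is assumed from the surrounding codebase; everything else comes from this diff.

import cats.effect.IO
import io.circe.syntax._
import docspell.common.Ident
import docspell.store.Store
import docspell.store.queries.{AttachedFile, QAttachment}

// list the (converted) attachment files of an item together with their file
// metadata and render them as JSON via AttachedFile's derived encoder
def attachedFilesJson(store: Store[IO], itemId: Ident): IO[String] =
  store
    .transact(QAttachment.attachmentFile(itemId))
    .map((files: List[AttachedFile]) => files.asJson.noSpaces)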
@@ -0,0 +1,155 @@
/*
 * Copyright 2020 Eike K. & Contributors
 *
 * SPDX-License-Identifier: AGPL-3.0-or-later
 */

package docspell.store.records

import cats.data.NonEmptyList
import cats.syntax.all._
import fs2.Stream

import docspell.addons.AddonTriggerType
import docspell.common.{Ident, Timestamp}
import docspell.store.qb.DSL._
import docspell.store.qb._

import doobie._

case class AddonRunConfigData(
    runConfig: RAddonRunConfig,
    addons: List[RAddonRunConfigAddon],
    triggers: List[RAddonRunConfigTrigger]
)

object AddonRunConfigData {

  def findAll(
      cid: Ident,
      enabled: Option[Boolean] = None,
      trigger: Set[AddonTriggerType] = Set.empty,
      configIds: Set[Ident] = Set.empty
  ): ConnectionIO[List[AddonRunConfigData]] =
    for {
      runConfigs <- RAddonRunConfig.findByCollective(cid, enabled, trigger, configIds)
      addons <- runConfigs.traverse(t =>
        RAddonRunConfigAddon.findByRunConfig(t.id).map(as => t.id -> as)
      )
      addonMap = addons.toMap
      triggers <- runConfigs.traverse(t =>
        RAddonRunConfigTrigger.findByRunConfig(t.id).map(ts => t.id -> ts)
      )
      triggerMap = triggers.toMap
      result = runConfigs.map(t =>
        AddonRunConfigData(t, addonMap(t.id), triggerMap(t.id))
      )
    } yield result

  /** Inserts new, creating new identifiers */
  def insert(task: AddonRunConfigData): ConnectionIO[Ident] =
    for {
      tid <- Ident.randomId[ConnectionIO]
      now <- Timestamp.current[ConnectionIO]
      tr = task.runConfig.copy(id = tid, created = now)
      _ <- RAddonRunConfig.insert(tr)
      _ <- task.triggers.traverse { t =>
        Ident
          .randomId[ConnectionIO]
          .map(id => t.copy(id = id, runConfigId = tid))
          .flatMap(RAddonRunConfigTrigger.insert)
      }
      _ <- task.addons.traverse { a =>
        Ident
          .randomId[ConnectionIO]
          .map(id => a.copy(id = id, runConfigId = tid))
          .flatMap(RAddonRunConfigAddon.insert)
      }
    } yield tid

  /** Updates the task, keeping its id but replacing all related objects */
  def update(task: AddonRunConfigData): ConnectionIO[Int] =
    for {
      n1 <- RAddonRunConfig.update(task.runConfig)
      _ <- RAddonRunConfigTrigger.deleteAllForConfig(task.runConfig.id)
      _ <- RAddonRunConfigAddon.deleteAllForConfig(task.runConfig.id)
      tts <- task.triggers.traverse { t =>
        Ident
          .randomId[ConnectionIO]
          .map(id => t.copy(id = id, runConfigId = task.runConfig.id))
          .flatMap(RAddonRunConfigTrigger.insert)
      }
      tas <- task.addons.traverse { a =>
        Ident
          .randomId[ConnectionIO]
          .map(id => a.copy(id = id, runConfigId = task.runConfig.id))
          .flatMap(RAddonRunConfigAddon.insert)
      }
    } yield n1 + tts.sum + tas.sum

  def findEnabledRef(
      cid: Ident,
      taskId: Ident
  ): ConnectionIO[List[(RAddonArchive, RAddonRunConfigAddon)]] = {
    val run = RAddonRunConfig.as("run")
    val aa = RAddonArchive.as("aa")
    val ta = RAddonRunConfigAddon.as("ta")

    Select(
      combineNel(select(aa.all), select(ta.all)),
      from(run)
        .innerJoin(ta, ta.runConfigId === run.id)
        .innerJoin(aa, aa.id === ta.addonId),
      run.cid === cid && run.enabled === true && run.id === taskId
    ).orderBy(ta.position.asc)
      .build
      .query[(RAddonArchive, RAddonRunConfigAddon)]
      .to[List]
  }

  def findEnabledRefs(
      cid: Ident,
      trigger: AddonTriggerType,
      addonTaskIds: Set[Ident]
  ): Stream[ConnectionIO, (RAddonRunConfig, List[(RAddonArchive, String)])] = {
    val run = RAddonRunConfig.as("run")
    val aa = RAddonArchive.as("aa")
    val ta = RAddonRunConfigAddon.as("ta")
    val tt = RAddonRunConfigTrigger.as("tt")

    val taskIdFilter = NonEmptyList
      .fromList(addonTaskIds.toList)
      .map(nel => run.id.in(nel))
    val validTasks = TableDef("valid_task")
    val validTaskId = Column[Ident]("id", validTasks)
    val query =
      withCte(
        validTasks -> Select(
          select(run.all),
          from(run)
            .innerJoin(tt, tt.runConfigId === run.id),
          run.cid === cid && run.enabled === true && tt.trigger === trigger &&? taskIdFilter
        ).distinct
      )(
        Select(
          combineNel(
            select(run.all.map(_.copy(table = validTasks))),
            select(aa.all),
            select(ta.args)
          ),
          from(validTasks)
            .innerJoin(ta, ta.runConfigId === validTaskId)
            .innerJoin(aa, aa.id === ta.addonId)
        ).orderBy(validTaskId)
      ).build

    query
      .query[(RAddonRunConfig, RAddonArchive, String)]
      .stream
      .groupAdjacentBy(_._1.id)
      .map { case (_, chunk) =>
        val list = chunk.toList
        (list.head._1, list.map(e => (e._2, e._3)))
      }
  }
}
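Not part of the commit: a hedged sketch of assembling and persisting a run config from the records above. The ids and the created timestamp are placeholders because insert generates fresh identifiers anyway; the args value is an illustrative empty JSON object and the trigger is passed in rather than naming a concrete AddonTriggerType value.

import docspell.addons.AddonTriggerType
import docspell.common.{Ident, Timestamp}
import docspell.store.records._
import doobie.ConnectionIO

// a run config for one collective that runs a single addon on the given trigger;
// placeholder ids are replaced by AddonRunConfigData.insert
def sampleConfig(
    cid: Ident,
    addonId: Ident,
    trigger: AddonTriggerType,
    now: Timestamp
): AddonRunConfigData =
  AddonRunConfigData(
    runConfig = RAddonRunConfig(Ident.unsafe(""), cid, None, "my-addon-task", enabled = true, now),
    addons = List(RAddonRunConfigAddon(Ident.unsafe(""), Ident.unsafe(""), addonId, args = "{}", position = 0)),
    triggers = List(RAddonRunConfigTrigger(Ident.unsafe(""), Ident.unsafe(""), trigger))
  )

def persist(cfg: AddonRunConfigData): ConnectionIO[Ident] =
  AddonRunConfigData.insert(cfg)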
@@ -0,0 +1,72 @@
/*
 * Copyright 2020 Eike K. & Contributors
 *
 * SPDX-License-Identifier: AGPL-3.0-or-later
 */

package docspell.store.records

import cats.data.OptionT
import cats.syntax.all._

import docspell.addons.AddonTriggerType
import docspell.common._
import docspell.store.qb.DSL._
import docspell.store.qb._
import docspell.store.records.AddonRunConfigResolved.AddonRef

import doobie._
import doobie.implicits._

final case class AddonRunConfigResolved(
    config: RAddonRunConfig,
    refs: List[AddonRef],
    trigger: List[RAddonRunConfigTrigger]
) {}

object AddonRunConfigResolved {

  case class AddonRef(archive: RAddonArchive, ref: RAddonRunConfigAddon)

  def findAddonRefs(configId: Ident): ConnectionIO[List[AddonRef]] = {
    val ca = RAddonRunConfigAddon.as("ca")
    val aa = RAddonArchive.as("aa")
    Select(
      select(combineNel(aa.all, ca.all)),
      from(ca)
        .innerJoin(aa, aa.id === ca.addonId),
      ca.runConfigId === configId
    ).build.query[AddonRef].to[List]
  }

  def getRefsAndTrigger(
      configId: Ident
  ): ConnectionIO[(List[AddonRef], List[RAddonRunConfigTrigger])] =
    (findAddonRefs(configId), RAddonRunConfigTrigger.findByRunConfig(configId)).tupled

  def findById(
      configId: Ident,
      collective: Ident,
      enabled: Option[Boolean]
  ): ConnectionIO[Option[AddonRunConfigResolved]] =
    (for {
      cfg <- OptionT(RAddonRunConfig.findById(collective, configId))
        .filter(c => enabled.isEmpty || enabled == c.enabled.some)
      (refs, tri) <- OptionT.liftF(getRefsAndTrigger(configId))
    } yield AddonRunConfigResolved(cfg, refs, tri)).value

  def findAllForCollective(
      cid: Ident,
      enabled: Option[Boolean],
      trigger: Set[AddonTriggerType],
      configIds: Set[Ident]
  ): ConnectionIO[List[AddonRunConfigResolved]] =
    for {
      cfgs <- RAddonRunConfig.findByCollective(cid, enabled, trigger, configIds)
      result <- cfgs.traverse(ac =>
        getRefsAndTrigger(ac.id).map { case (refs, tri) =>
          AddonRunConfigResolved(ac, refs, tri)
        }
      )
    } yield result
}
@@ -0,0 +1,184 @@
/*
 * Copyright 2020 Eike K. & Contributors
 *
 * SPDX-License-Identifier: AGPL-3.0-or-later
 */

package docspell.store.records

import cats.data.NonEmptyList

import docspell.addons.{AddonArchive, AddonMeta, AddonTriggerType}
import docspell.common._
import docspell.store.file.FileUrlReader
import docspell.store.qb.DSL._
import docspell.store.qb._

import doobie._
import doobie.implicits._
import io.circe.generic.semiauto.{deriveDecoder, deriveEncoder}
import io.circe.{Decoder, Encoder}

final case class RAddonArchive(
    id: Ident,
    cid: Ident,
    fileId: FileKey,
    originalUrl: Option[LenientUri],
    name: String,
    version: String,
    description: Option[String],
    triggers: Set[AddonTriggerType],
    created: Timestamp
) {

  def nameAndVersion: String =
    s"${name}-${version}"

  def isUnchanged(meta: AddonMeta): Boolean =
    name == meta.meta.name &&
      version == meta.meta.version &&
      description == meta.meta.description

  def isChanged(meta: AddonMeta): Boolean =
    !isUnchanged(meta)

  def asArchive: AddonArchive =
    AddonArchive(FileUrlReader.url(fileId), name, version)

  def update(file: FileKey, meta: AddonMeta): RAddonArchive =
    copy(
      fileId = file,
      name = meta.meta.name,
      version = meta.meta.version,
      description = meta.meta.description,
      triggers = meta.triggers.getOrElse(Set.empty)
    )
}

object RAddonArchive {
  case class Table(alias: Option[String]) extends TableDef {
    val tableName = "addon_archive"

    val id = Column[Ident]("id", this)
    val cid = Column[Ident]("cid", this)
    val fileId = Column[FileKey]("file_id", this)
    val originalUrl = Column[LenientUri]("original_url", this)
    val name = Column[String]("name", this)
    val version = Column[String]("version", this)
    val description = Column[String]("description", this)
    val triggers = Column[Set[AddonTriggerType]]("triggers", this)
    val created = Column[Timestamp]("created", this)

    val all: NonEmptyList[Column[_]] =
      NonEmptyList.of(
        id,
        cid,
        fileId,
        originalUrl,
        name,
        version,
        description,
        triggers,
        created
      )
  }

  def apply(
      id: Ident,
      cid: Ident,
      fileId: FileKey,
      originalUrl: Option[LenientUri],
      meta: AddonMeta,
      created: Timestamp
  ): RAddonArchive =
    RAddonArchive(
      id,
      cid,
      fileId,
      originalUrl,
      meta.meta.name,
      meta.meta.version,
      meta.meta.description,
      meta.triggers.getOrElse(Set.empty),
      created
    )

  def as(alias: String): Table =
    Table(Some(alias))

  val T = Table(None)

  def insert(r: RAddonArchive, silent: Boolean): ConnectionIO[Int] = {
    val values =
      sql"${r.id}, ${r.cid}, ${r.fileId}, ${r.originalUrl}, ${r.name}, ${r.version}, ${r.description}, ${r.triggers}, ${r.created}"

    if (silent) DML.insertSilent(T, T.all, values)
    else DML.insert(T, T.all, values)
  }

  def existsByUrl(cid: Ident, url: LenientUri): ConnectionIO[Boolean] =
    Select(
      select(count(T.id)),
      from(T),
      T.cid === cid && T.originalUrl === url
    ).build.query[Int].unique.map(_ > 0)

  def findByUrl(cid: Ident, url: LenientUri): ConnectionIO[Option[RAddonArchive]] =
    Select(
      select(T.all),
      from(T),
      T.cid === cid && T.originalUrl === url
    ).build.query[RAddonArchive].option

  def findByNameAndVersion(
      cid: Ident,
      name: String,
      version: String
  ): ConnectionIO[Option[RAddonArchive]] =
    Select(
      select(T.all),
      from(T),
      T.cid === cid && T.name === name && T.version === version
    ).build.query[RAddonArchive].option

  def findById(cid: Ident, id: Ident): ConnectionIO[Option[RAddonArchive]] =
    Select(
      select(T.all),
      from(T),
      T.cid === cid && T.id === id
    ).build.query[RAddonArchive].option

  def findByIds(cid: Ident, ids: NonEmptyList[Ident]): ConnectionIO[List[RAddonArchive]] =
    Select(
      select(T.all),
      from(T),
      T.cid === cid && T.id.in(ids)
    ).orderBy(T.name).build.query[RAddonArchive].to[List]

  def update(r: RAddonArchive): ConnectionIO[Int] =
    DML.update(
      T,
      T.id === r.id && T.cid === r.cid,
      DML.set(
        T.fileId.setTo(r.fileId),
        T.originalUrl.setTo(r.originalUrl),
        T.name.setTo(r.name),
        T.version.setTo(r.version),
        T.description.setTo(r.description),
        T.triggers.setTo(r.triggers)
      )
    )

  def listAll(cid: Ident): ConnectionIO[List[RAddonArchive]] =
    Select(
      select(T.all),
      from(T),
      T.cid === cid
    ).orderBy(T.name.asc).build.query[RAddonArchive].to[List]

  def deleteById(cid: Ident, id: Ident): ConnectionIO[Int] =
    DML.delete(T, T.cid === cid && T.id === id)

  implicit val jsonDecoder: Decoder[RAddonArchive] = deriveDecoder
  implicit val jsonEncoder: Encoder[RAddonArchive] = deriveEncoder
}
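Not part of the commit: a sketch of registering an addon archive once its zip has been stored as a file and its AddonMeta has been read elsewhere. It only uses definitions from this diff; whether insertSilent actually swallows violations of the unique constraints from the migration is an assumption here.

import docspell.addons.AddonMeta
import docspell.common.{FileKey, Ident, LenientUri, Timestamp}
import docspell.store.records.RAddonArchive
import doobie.ConnectionIO

// record a downloaded addon zip for a collective; the (cid, original_url) and
// (cid, name, version) unique constraints guard against duplicate registrations
def registerAddon(
    cid: Ident,
    file: FileKey,
    url: Option[LenientUri],
    meta: AddonMeta
): ConnectionIO[Int] =
  for {
    id <- Ident.randomId[ConnectionIO]
    now <- Timestamp.current[ConnectionIO]
    record = RAddonArchive(id, cid, file, url, meta, now)
    n <- RAddonArchive.insert(record, silent = true)
  } yield n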
@@ -0,0 +1,99 @@
/*
 * Copyright 2020 Eike K. & Contributors
 *
 * SPDX-License-Identifier: AGPL-3.0-or-later
 */

package docspell.store.records

import cats.data.NonEmptyList

import docspell.addons.AddonTriggerType
import docspell.common._
import docspell.store.qb.DSL._
import docspell.store.qb._

import doobie._
import doobie.implicits._

final case class RAddonRunConfig(
    id: Ident,
    cid: Ident,
    userId: Option[Ident],
    name: String,
    enabled: Boolean,
    created: Timestamp
)

object RAddonRunConfig {
  final case class Table(alias: Option[String]) extends TableDef {
    val tableName = "addon_run_config"

    val id = Column[Ident]("id", this)
    val cid = Column[Ident]("cid", this)
    val userId = Column[Ident]("user_id", this)
    val name = Column[String]("name", this)
    val enabled = Column[Boolean]("enabled", this)
    val created = Column[Timestamp]("created", this)

    val all: NonEmptyList[Column[_]] =
      NonEmptyList.of(id, cid, userId, name, enabled, created)
  }

  def as(alias: String): Table = Table(Some(alias))
  val T = Table(None)

  def insert(r: RAddonRunConfig): ConnectionIO[Int] =
    DML.insert(
      T,
      T.all,
      sql"${r.id}, ${r.cid}, ${r.userId}, ${r.name}, ${r.enabled}, ${r.created}"
    )

  def update(r: RAddonRunConfig): ConnectionIO[Int] =
    DML.update(
      T,
      T.id === r.id,
      DML.set(
        T.name.setTo(r.name),
        T.enabled.setTo(r.enabled),
        T.userId.setTo(r.userId)
      )
    )

  def findById(cid: Ident, id: Ident): ConnectionIO[Option[RAddonRunConfig]] =
    Select(select(T.all), from(T), T.cid === cid && T.id === id).build
      .query[RAddonRunConfig]
      .option

  def findByCollective(
      cid: Ident,
      enabled: Option[Boolean],
      trigger: Set[AddonTriggerType],
      configIds: Set[Ident]
  ): ConnectionIO[List[RAddonRunConfig]] = {
    val ac = RAddonRunConfig.as("ac")
    val tt = RAddonRunConfigTrigger.as("tt")
    val filter =
      ac.cid === cid &&?
        enabled.map(e => ac.enabled === e) &&?
        NonEmptyList.fromList(configIds.toList).map(ids => ac.id.in(ids))

    val selectConfigs =
      NonEmptyList.fromList(trigger.toList) match {
        case Some(tri) =>
          Select(
            select(ac.all),
            from(ac).innerJoin(tt, tt.runConfigId === ac.id),
            filter && tt.trigger.in(tri)
          )
        case None =>
          Select(select(ac.all), from(ac), filter)
      }

    selectConfigs.build.query[RAddonRunConfig].to[List]
  }

  def deleteById(cid: Ident, id: Ident): ConnectionIO[Int] =
    DML.delete(T, T.cid === cid && T.id === id)
}
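Not part of the commit: a small example of the findByCollective filter combination, selecting only enabled run configs of a collective that react to a given trigger.

import docspell.addons.AddonTriggerType
import docspell.common.Ident
import docspell.store.records.RAddonRunConfig
import doobie.ConnectionIO

// an empty configIds set means "no restriction on specific config ids"
def enabledFor(cid: Ident, trigger: AddonTriggerType): ConnectionIO[List[RAddonRunConfig]] =
  RAddonRunConfig.findByCollective(cid, enabled = Some(true), trigger = Set(trigger), configIds = Set.empty)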
@@ -0,0 +1,68 @@
/*
 * Copyright 2020 Eike K. & Contributors
 *
 * SPDX-License-Identifier: AGPL-3.0-or-later
 */

package docspell.store.records

import cats.data.NonEmptyList

import docspell.common._
import docspell.store.qb.DSL._
import docspell.store.qb._

import doobie._
import doobie.implicits._

final case class RAddonRunConfigAddon(
    id: Ident,
    runConfigId: Ident,
    addonId: Ident,
    args: String,
    position: Int
)

object RAddonRunConfigAddon {
  final case class Table(alias: Option[String]) extends TableDef {
    val tableName = "addon_run_config_addon"

    val id = Column[Ident]("id", this)
    val runConfigId = Column[Ident]("addon_run_config_id", this)
    val addonId = Column[Ident]("addon_id", this)
    val args = Column[String]("args", this)
    val position = Column[Int]("position", this)

    val all: NonEmptyList[Column[_]] =
      NonEmptyList.of(id, runConfigId, addonId, args, position)
  }

  def as(alias: String): Table = Table(Some(alias))
  val T = Table(None)

  def insert(r: RAddonRunConfigAddon): ConnectionIO[Int] =
    DML.insert(
      T,
      T.all,
      sql"${r.id}, ${r.runConfigId}, ${r.addonId}, ${r.args}, ${r.position}"
    )

  def updateArgs(addonTaskId: Ident, addonId: Ident, args: String): ConnectionIO[Int] =
    DML.update(
      T,
      T.runConfigId === addonTaskId && T.addonId === addonId,
      DML.set(
        T.args.setTo(args)
      )
    )

  def findByRunConfig(addonTaskId: Ident): ConnectionIO[List[RAddonRunConfigAddon]] =
    Select(select(T.all), from(T), T.runConfigId === addonTaskId)
      .orderBy(T.position.asc)
      .build
      .query[RAddonRunConfigAddon]
      .to[List]

  def deleteAllForConfig(addonTaskId: Ident): ConnectionIO[Int] =
    DML.delete(T, T.runConfigId === addonTaskId)
}
@@ -0,0 +1,62 @@
/*
 * Copyright 2020 Eike K. & Contributors
 *
 * SPDX-License-Identifier: AGPL-3.0-or-later
 */

package docspell.store.records

import cats.data.NonEmptyList

import docspell.addons.AddonTriggerType
import docspell.common._
import docspell.store.qb.DSL._
import docspell.store.qb._

import doobie._
import doobie.implicits._

final case class RAddonRunConfigTrigger(
    id: Ident,
    runConfigId: Ident,
    trigger: AddonTriggerType
)

object RAddonRunConfigTrigger {
  final case class Table(alias: Option[String]) extends TableDef {
    val tableName = "addon_run_config_trigger"

    val id = Column[Ident]("id", this)
    val runConfigId = Column[Ident]("addon_run_config_id", this)
    val trigger = Column[AddonTriggerType]("triggers", this)

    val all: NonEmptyList[Column[_]] =
      NonEmptyList.of(id, runConfigId, trigger)
  }

  def as(alias: String): Table = Table(Some(alias))
  val T = Table(None)

  def deleteAllForConfig(addonTaskId: Ident): ConnectionIO[Int] =
    DML.delete(T, T.runConfigId === addonTaskId)

  def insert(r: RAddonRunConfigTrigger): ConnectionIO[Int] =
    DML.insert(T, T.all, sql"${r.id}, ${r.runConfigId}, ${r.trigger}")

  def insertAll(
      addonTaskId: Ident,
      triggers: NonEmptyList[AddonTriggerType]
  ): ConnectionIO[Int] = {
    val records = triggers.traverse(t =>
      Ident.randomId[ConnectionIO].map(id => RAddonRunConfigTrigger(id, addonTaskId, t))
    )
    // the column is named "triggers" (see the Table definition above and the migration)
    val inserts =
      s"INSERT INTO ${T.tableName} (id, addon_run_config_id, triggers) VALUES (?,?,?)"
    records.flatMap(rs => Update[RAddonRunConfigTrigger](inserts).updateMany(rs))
  }

  def findByRunConfig(addonTaskId: Ident): ConnectionIO[List[RAddonRunConfigTrigger]] =
    Select(select(T.all), from(T), T.runConfigId === addonTaskId).build
      .query[RAddonRunConfigTrigger]
      .to[List]
}
@@ -119,6 +119,9 @@ object RAttachmentMeta {
  def updatePageCount(mid: Ident, pageCount: Option[Int]): ConnectionIO[Int] =
    DML.update(T, T.id === mid, DML.set(T.pages.setTo(pageCount)))

+  def updateContent(id: Ident, text: String): ConnectionIO[Int] =
+    DML.update(T, T.id === id, DML.set(T.content.setTo(text)))
+
  def delete(attachId: Ident): ConnectionIO[Int] =
    DML.delete(T, T.id === attachId)
}
@@ -6,7 +6,8 @@

package docspell.store.records

-import cats.data.NonEmptyList
+import cats.data.{NonEmptyList, OptionT}
+import cats.syntax.all._

import docspell.common.{FileKey, _}
import docspell.store.qb.DSL._
@@ -44,12 +45,29 @@ object RAttachmentPreview {
  def insert(v: RAttachmentPreview): ConnectionIO[Int] =
    DML.insert(T, T.all, fr"${v.id},${v.fileId},${v.name},${v.created}")

+  def update(r: RAttachmentPreview): ConnectionIO[Int] =
+    DML.update(
+      T,
+      T.id === r.id,
+      DML.set(
+        T.fileId.setTo(r.fileId),
+        T.name.setTo(r.name)
+      )
+    )
+
  def findById(attachId: Ident): ConnectionIO[Option[RAttachmentPreview]] =
    run(select(T.all), from(T), T.id === attachId).query[RAttachmentPreview].option

  def delete(attachId: Ident): ConnectionIO[Int] =
    DML.delete(T, T.id === attachId)

+  def upsert(r: RAttachmentPreview): ConnectionIO[Option[FileKey]] =
+    OptionT(findById(r.id))
+      .semiflatMap(existing =>
+        update(existing.copy(fileId = r.fileId, name = r.name)).as(Some(existing.fileId))
+      )
+      .getOrElseF(insert(r).as(None))
+
  def findByIdAndCollective(
      attachId: Ident,
      collective: Ident
@@ -309,20 +309,22 @@ object RItem {
  def updateFolder(
      itemId: Ident,
      coll: Ident,
-      folderId: Option[Ident]
-  ): ConnectionIO[Int] =
+      folderIdOrName: Option[String]
+  ): ConnectionIO[(Int, Option[Ident])] =
    for {
      t <- currentTime
-      fid <- folderId match {
-        case Some(f) => RFolder.requireIdByIdOrName(f, f.id, coll).map(_.some)
-        case None => None.pure[ConnectionIO]
+      fid <- folderIdOrName match {
+        case Some(f) =>
+          val fid = Ident.fromString(f).getOrElse(Ident.unsafe(""))
+          RFolder.requireIdByIdOrName(fid, f, coll).map(_.some)
+        case None => None.pure[ConnectionIO]
      }
      n <- DML.update(
        T,
        T.cid === coll && T.id === itemId,
        DML.set(T.folder.setTo(fid), T.updated.setTo(t))
      )
-    } yield n
+    } yield (n, fid)

  def updateNotes(itemId: Ident, coll: Ident, text: Option[String]): ConnectionIO[Int] =
    for {
@@ -334,6 +336,26 @@ object RItem {
      )
    } yield n

+  def appendNotes(
+      itemId: Ident,
+      cid: Ident,
+      text: String,
+      sep: Option[String]
+  ): ConnectionIO[Option[String]] = {
+    val curNotes =
+      Select(select(T.notes), from(T), T.cid === cid && T.id === itemId).build
+        .query[Option[String]]
+        .option
+
+    curNotes.flatMap {
+      case Some(notes) =>
+        val newText = notes.map(_ + sep.getOrElse("")).getOrElse("") + text
+        updateNotes(itemId, cid, Some(newText)).as(newText.some)
+      case None =>
+        (None: Option[String]).pure[ConnectionIO]
+    }
+  }
+
  def updateName(itemId: Ident, coll: Ident, itemName: String): ConnectionIO[Int] =
    for {
      t <- currentTime
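Not part of the commit: a small usage sketch of appendNotes, e.g. for letting an addon attach a line to an item's notes. The separator and text are illustrative; the returned value is the new notes text, or None when the item does not exist.

import docspell.common.Ident
import docspell.store.records.RItem
import doobie.ConnectionIO

// append a line produced by an addon, separated from existing notes by a blank line
def noteAddonResult(itemId: Ident, cid: Ident, line: String): ConnectionIO[Option[String]] =
  RItem.appendNotes(itemId, cid, line, sep = Some("\n\n"))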
@@ -16,6 +16,7 @@ import docspell.store.qb._

import doobie._
import doobie.implicits._
+import scodec.bits.ByteVector

case class RNode(
    id: Ident,
@@ -23,13 +24,19 @@ case class RNode(
    url: LenientUri,
    updated: Timestamp,
    created: Timestamp,
-    notFound: Int
+    notFound: Int,
+    serverSecret: Option[ByteVector]
) {}

object RNode {

-  def apply[F[_]: Sync](id: Ident, nodeType: NodeType, uri: LenientUri): F[RNode] =
-    Timestamp.current[F].map(now => RNode(id, nodeType, uri, now, now, 0))
+  def apply[F[_]: Sync](
+      id: Ident,
+      nodeType: NodeType,
+      uri: LenientUri,
+      serverSecret: Option[ByteVector]
+  ): F[RNode] =
+    Timestamp.current[F].map(now => RNode(id, nodeType, uri, now, now, 0, serverSecret))

  final case class Table(alias: Option[String]) extends TableDef {
    val tableName = "node"
@@ -40,7 +47,9 @@ object RNode {
    val updated = Column[Timestamp]("updated", this)
    val created = Column[Timestamp]("created", this)
    val notFound = Column[Int]("not_found", this)
-    val all = NonEmptyList.of[Column[_]](id, nodeType, url, updated, created, notFound)
+    val serverSecret = Column[ByteVector]("server_secret", this)
+    val all = NonEmptyList
+      .of[Column[_]](id, nodeType, url, updated, created, notFound, serverSecret)
  }

  def as(alias: String): Table =
@@ -52,7 +61,7 @@ object RNode {
    DML.insert(
      t,
      t.all,
-      fr"${v.id},${v.nodeType},${v.url},${v.updated},${v.created},${v.notFound}"
+      fr"${v.id},${v.nodeType},${v.url},${v.updated},${v.created},${v.notFound},${v.serverSecret}"
    )
  }

@@ -65,6 +74,7 @@ object RNode {
      DML.set(
        t.nodeType.setTo(v.nodeType),
        t.url.setTo(v.url),
+        t.serverSecret.setTo(v.serverSecret),
        t.updated.setTo(v.updated)
      )
    )
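Not part of the commit: a sketch of creating a node record with a server secret using the extended apply. How docspell actually generates and distributes this secret is not shown in this diff, so the SecureRandom approach and the NodeType.Restserver value are assumptions.

import cats.effect.IO
import scodec.bits.ByteVector
import docspell.common.{Ident, LenientUri, NodeType}
import docspell.store.records.RNode

// register this node with a freshly generated 32-byte shared secret
def mkNode(id: Ident, uri: LenientUri): IO[RNode] = {
  val secret = {
    val bytes = new Array[Byte](32)
    new java.security.SecureRandom().nextBytes(bytes)
    ByteVector(bytes)
  }
  RNode[IO](id, NodeType.Restserver, uri, Some(secret))
}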
@@ -152,6 +152,14 @@ object RUser {
      .query[Ident]
      .option

+  case class IdAndLogin(uid: Ident, login: Ident)
+  def getIdByIdOrLogin(idOrLogin: Ident): ConnectionIO[Option[IdAndLogin]] =
+    Select(
+      select(T.uid, T.login),
+      from(T),
+      T.uid === idOrLogin || T.login === idOrLogin
+    ).build.query[IdAndLogin].option
+
  def getIdByAccount(account: AccountId): ConnectionIO[Ident] =
    OptionT(findIdByAccount(account)).getOrElseF(
      Sync[ConnectionIO].raiseError(