Mirror of https://github.com/TheAnachronism/docspell.git, synced 2025-06-22 02:18:26 +00:00
Initial version.
Features:
- Upload PDF files and have them analyzed
- Manage metadata and items
- See processing progress in the web app
@@ -0,0 +1,210 @@
CREATE TABLE `filemeta` (
  `id` varchar(254) not null primary key,
  `timestamp` varchar(40) not null,
  `mimetype` varchar(254) not null,
  `length` bigint not null,
  `checksum` varchar(254) not null,
  `chunks` int not null,
  `chunksize` int not null
);

CREATE TABLE `filechunk` (
  fileId varchar(254) not null,
  chunkNr int not null,
  chunkLength int not null,
  chunkData mediumblob not null,
  primary key (fileId, chunkNr)
);

CREATE TABLE `collective` (
  `cid` varchar(254) not null primary key,
  `state` varchar(254) not null,
  `doclang` varchar(254) not null,
  `created` timestamp not null
);

CREATE TABLE `user_` (
  `uid` varchar(254) not null primary key,
  `login` varchar(254) not null,
  `cid` varchar(254) not null,
  `password` varchar(254) not null,
  `state` varchar(254) not null,
  `email` varchar(254),
  `logincount` int not null,
  `lastlogin` timestamp,
  `created` timestamp not null,
  unique (`cid`, `login`),
  foreign key (`cid`) references `collective`(`cid`)
);

CREATE TABLE `invitation` (
  `id` varchar(254) not null primary key,
  `created` timestamp not null
);

CREATE TABLE `source` (
  `sid` varchar(254) not null primary key,
  `cid` varchar(254) not null,
  `abbrev` varchar(254) not null,
  `description` text,
  `counter` int not null,
  `enabled` boolean not null,
  `priority` int not null,
  `created` timestamp not null,
  unique (`cid`, `abbrev`),
  foreign key (`cid`) references `collective`(`cid`)
);

CREATE TABLE `organization` (
  `oid` varchar(254) not null primary key,
  `cid` varchar(254) not null,
  `name` varchar(254) not null,
  `street` varchar(254),
  `zip` varchar(254),
  `city` varchar(254),
  `country` varchar(254),
  `notes` text,
  `created` timestamp not null,
  unique (`cid`, `name`),
  foreign key (`cid`) references `collective`(`cid`)
);

CREATE TABLE `person` (
  `pid` varchar(254) not null primary key,
  `cid` varchar(254) not null,
  `name` varchar(254) not null,
  `street` varchar(254),
  `zip` varchar(254),
  `city` varchar(254),
  `country` varchar(254),
  `notes` text,
  `concerning` boolean not null,
  `created` varchar(30) not null,
  unique (`cid`, `name`),
  foreign key (`cid`) references `collective`(`cid`)
);

CREATE TABLE `contact` (
  `contactid` varchar(254) not null primary key,
  `value` varchar(254) not null,
  `kind` varchar(254) not null,
  `pid` varchar(254),
  `oid` varchar(254),
  `created` timestamp not null,
  foreign key (`pid`) references `person`(`pid`),
  foreign key (`oid`) references `organization`(`oid`)
);

CREATE TABLE `equipment` (
  `eid` varchar(254) not null primary key,
  `cid` varchar(254) not null,
  `name` varchar(254) not null,
  `created` timestamp not null,
  unique (`cid`,`eid`),
  foreign key (`cid`) references `collective`(`cid`)
);

CREATE TABLE `item` (
  `itemid` varchar(254) not null primary key,
  `cid` varchar(254) not null,
  `name` varchar(254) not null,
  `itemdate` timestamp,
  `source` varchar(254) not null,
  `incoming` boolean not null,
  `state` varchar(254) not null,
  `corrorg` varchar(254),
  `corrperson` varchar(254),
  `concperson` varchar(254),
  `concequipment` varchar(254),
  `inreplyto` varchar(254),
  `duedate` timestamp,
  `notes` text,
  `created` timestamp not null,
  `updated` timestamp not null,
  foreign key (`inreplyto`) references `item`(`itemid`),
  foreign key (`corrorg`) references `organization`(`oid`),
  foreign key (`corrperson`) references `person`(`pid`),
  foreign key (`concperson`) references `person`(`pid`),
  foreign key (`concequipment`) references `equipment`(`eid`),
  foreign key (`cid`) references `collective`(`cid`)
);

CREATE TABLE `attachment` (
  `attachid` varchar(254) not null primary key,
  `itemid` varchar(254) not null,
  `filemetaid` varchar(254) not null,
  `position` int not null,
  `created` timestamp not null,
  `name` varchar(254),
  foreign key (`itemid`) references `item`(`itemid`),
  foreign key (`filemetaid`) references `filemeta`(`id`)
);

CREATE TABLE `attachmentmeta` (
  `attachid` varchar(254) not null primary key,
  `content` text,
  `nerlabels` text,
  `itemproposals` text,
  foreign key (`attachid`) references `attachment`(`attachid`)
);

CREATE TABLE `tag` (
  `tid` varchar(254) not null primary key,
  `cid` varchar(254) not null,
  `name` varchar(254) not null,
  `category` varchar(254),
  `created` timestamp not null,
  unique (`cid`, `name`),
  foreign key (`cid`) references `collective`(`cid`)
);

CREATE TABLE `tagitem` (
  `tagitemid` varchar(254) not null primary key,
  `itemid` varchar(254) not null,
  `tid` varchar(254) not null,
  unique (`itemid`, `tid`),
  foreign key (`itemid`) references `item`(`itemid`),
  foreign key (`tid`) references `tag`(`tid`)
);

CREATE TABLE `job` (
  `jid` varchar(254) not null primary key,
  `task` varchar(254) not null,
  `group_` varchar(254) not null,
  `args` text not null,
  `subject` varchar(254) not null,
  `submitted` timestamp not null,
  `submitter` varchar(254) not null,
  `priority` int not null,
  `state` varchar(254) not null,
  `retries` int not null,
  `progress` int not null,
  `tracker` varchar(254),
  `worker` varchar(254),
  `started` timestamp,
  `finished` timestamp,
  `startedmillis` bigint
);

CREATE TABLE `joblog` (
  `id` varchar(254) not null primary key,
  `jid` varchar(254) not null,
  `level` varchar(254) not null,
  `created` timestamp not null,
  `message` text not null,
  foreign key (`jid`) references `job`(`jid`)
);

CREATE TABLE `jobgroupuse` (
  `groupid` varchar(254) not null,
  `workerid` varchar(254) not null,
  primary key (`groupid`, `workerid`)
);

CREATE TABLE `node` (
  `id` varchar(254) not null,
  `type` varchar(254) not null,
  `url` varchar(254) not null,
  `updated` timestamp not null,
  `created` timestamp not null
)
@@ -0,0 +1,211 @@

CREATE TABLE "filemeta" (
  "id" varchar(254) not null primary key,
  "timestamp" varchar(40) not null,
  "mimetype" varchar(254) not null,
  "length" bigint not null,
  "checksum" varchar(254) not null,
  "chunks" int not null,
  "chunksize" int not null
);

CREATE TABLE "filechunk" (
  fileId varchar(254) not null,
  chunkNr int not null,
  chunkLength int not null,
  chunkData bytea not null,
  primary key (fileId, chunkNr)
);

CREATE TABLE "collective" (
  "cid" varchar(254) not null primary key,
  "state" varchar(254) not null,
  "doclang" varchar(254) not null,
  "created" timestamp not null
);

CREATE TABLE "user_" (
  "uid" varchar(254) not null primary key,
  "login" varchar(254) not null,
  "cid" varchar(254) not null,
  "password" varchar(254) not null,
  "state" varchar(254) not null,
  "email" varchar(254),
  "logincount" int not null,
  "lastlogin" timestamp,
  "created" timestamp not null,
  unique ("cid", "login"),
  foreign key ("cid") references "collective"("cid")
);

CREATE TABLE "invitation" (
  "id" varchar(254) not null primary key,
  "created" timestamp not null
);

CREATE TABLE "source" (
  "sid" varchar(254) not null primary key,
  "cid" varchar(254) not null,
  "abbrev" varchar(254) not null,
  "description" text,
  "counter" int not null,
  "enabled" boolean not null,
  "priority" int not null,
  "created" timestamp not null,
  unique ("cid", "abbrev"),
  foreign key ("cid") references "collective"("cid")
);

CREATE TABLE "organization" (
  "oid" varchar(254) not null primary key,
  "cid" varchar(254) not null,
  "name" varchar(254) not null,
  "street" varchar(254),
  "zip" varchar(254),
  "city" varchar(254),
  "country" varchar(254),
  "notes" text,
  "created" timestamp not null,
  unique ("cid", "name"),
  foreign key ("cid") references "collective"("cid")
);

CREATE TABLE "person" (
  "pid" varchar(254) not null primary key,
  "cid" varchar(254) not null,
  "name" varchar(254) not null,
  "street" varchar(254),
  "zip" varchar(254),
  "city" varchar(254),
  "country" varchar(254),
  "notes" text,
  "concerning" boolean not null,
  "created" varchar(30) not null,
  unique ("cid", "name"),
  foreign key ("cid") references "collective"("cid")
);

CREATE TABLE "contact" (
  "contactid" varchar(254) not null primary key,
  "value" varchar(254) not null,
  "kind" varchar(254) not null,
  "pid" varchar(254),
  "oid" varchar(254),
  "created" timestamp not null,
  foreign key ("pid") references "person"("pid"),
  foreign key ("oid") references "organization"("oid")
);

CREATE TABLE "equipment" (
  "eid" varchar(254) not null primary key,
  "cid" varchar(254) not null,
  "name" varchar(254) not null,
  "created" timestamp not null,
  unique ("cid","eid"),
  foreign key ("cid") references "collective"("cid")
);

CREATE TABLE "item" (
  "itemid" varchar(254) not null primary key,
  "cid" varchar(254) not null,
  "name" varchar(254) not null,
  "itemdate" timestamp,
  "source" varchar(254) not null,
  "incoming" boolean not null,
  "state" varchar(254) not null,
  "corrorg" varchar(254),
  "corrperson" varchar(254),
  "concperson" varchar(254),
  "concequipment" varchar(254),
  "inreplyto" varchar(254),
  "duedate" timestamp,
  "notes" text,
  "created" timestamp not null,
  "updated" timestamp not null,
  foreign key ("inreplyto") references "item"("itemid"),
  foreign key ("corrorg") references "organization"("oid"),
  foreign key ("corrperson") references "person"("pid"),
  foreign key ("concperson") references "person"("pid"),
  foreign key ("concequipment") references "equipment"("eid"),
  foreign key ("cid") references "collective"("cid")
);

CREATE TABLE "attachment" (
  "attachid" varchar(254) not null primary key,
  "itemid" varchar(254) not null,
  "filemetaid" varchar(254) not null,
  "position" int not null,
  "created" timestamp not null,
  "name" varchar(254),
  foreign key ("itemid") references "item"("itemid"),
  foreign key ("filemetaid") references "filemeta"("id")
);

CREATE TABLE "attachmentmeta" (
  "attachid" varchar(254) not null primary key,
  "content" text,
  "nerlabels" text,
  "itemproposals" text,
  foreign key ("attachid") references "attachment"("attachid")
);

CREATE TABLE "tag" (
  "tid" varchar(254) not null primary key,
  "cid" varchar(254) not null,
  "name" varchar(254) not null,
  "category" varchar(254),
  "created" timestamp not null,
  unique ("cid", "name"),
  foreign key ("cid") references "collective"("cid")
);

CREATE TABLE "tagitem" (
  "tagitemid" varchar(254) not null primary key,
  "itemid" varchar(254) not null,
  "tid" varchar(254) not null,
  unique ("itemid", "tid"),
  foreign key ("itemid") references "item"("itemid"),
  foreign key ("tid") references "tag"("tid")
);

CREATE TABLE "job" (
  "jid" varchar(254) not null primary key,
  "task" varchar(254) not null,
  "group_" varchar(254) not null,
  "args" text not null,
  "subject" varchar(254) not null,
  "submitted" timestamp not null,
  "submitter" varchar(254) not null,
  "priority" int not null,
  "state" varchar(254) not null,
  "retries" int not null,
  "progress" int not null,
  "tracker" varchar(254),
  "worker" varchar(254),
  "started" timestamp,
  "finished" timestamp,
  "startedmillis" bigint
);

CREATE TABLE "joblog" (
  "id" varchar(254) not null primary key,
  "jid" varchar(254) not null,
  "level" varchar(254) not null,
  "created" timestamp not null,
  "message" text not null,
  foreign key ("jid") references "job"("jid")
);

CREATE TABLE "jobgroupuse" (
  "groupid" varchar(254) not null,
  "workerid" varchar(254) not null,
  primary key ("groupid", "workerid")
);

CREATE TABLE "node" (
  "id" varchar(254) not null,
  "type" varchar(254) not null,
  "url" varchar(254) not null,
  "updated" timestamp not null,
  "created" timestamp not null
)
modules/store/src/main/scala/docspell/store/AddResult.scala (new file, 43 lines)
@@ -0,0 +1,43 @@
package docspell.store

import AddResult._

sealed trait AddResult {
  def toEither: Either[Throwable, Unit]
  def isSuccess: Boolean

  def fold[A](fa: Success.type => A, fb: EntityExists => A, fc: Failure => A): A

  def isError: Boolean =
    !isSuccess
}

object AddResult {

  def fromUpdate(e: Either[Throwable, Int]): AddResult =
    e.fold(Failure, n => if (n > 0) Success else Failure(new Exception("No rows updated")))

  case object Success extends AddResult {
    def toEither = Right(())
    val isSuccess = true
    def fold[A](fa: Success.type => A, fb: EntityExists => A, fc: Failure => A): A =
      fa(this)
  }

  case class EntityExists(msg: String) extends AddResult {
    def toEither = Left(new Exception(msg))
    val isSuccess = false
    def fold[A](fa: Success.type => A, fb: EntityExists => A, fc: Failure => A): A =
      fb(this)

    def withMsg(msg: String): EntityExists =
      EntityExists(msg)
  }

  case class Failure(ex: Throwable) extends AddResult {
    def toEither = Left(ex)
    val isSuccess = false
    def fold[A](fa: Success.type => A, fb: EntityExists => A, fc: Failure => A): A =
      fc(this)
  }
}
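A minimal sketch (not part of the commit) of how calling code might consume AddResult; the update counts and error messages below are invented for illustration:

import docspell.store.AddResult

object AddResultExample extends App {
  // Hypothetical update counts, wrapped the way fromUpdate expects them.
  val ok: AddResult  = AddResult.fromUpdate(Right(1))
  val bad: AddResult = AddResult.fromUpdate(Left(new Exception("db down")))

  // fold forces callers to handle all three outcomes.
  val message: String = bad.fold(
    _ => "stored",
    exists => s"already present: ${exists.msg}",
    failure => s"failed: ${failure.ex.getMessage}"
  )
  println(message) // prints: failed: db down
}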
@@ -1,6 +1,8 @@
package docspell.store

case class JdbcConfig(url: String
import docspell.common.LenientUri

case class JdbcConfig(url: LenientUri
                     , user: String
                     , password: String
                     ) {
@@ -24,13 +26,16 @@ case class JdbcConfig(url: String
      sys.error("No JDBC url specified")
  }

  override def toString: String =
    s"JdbcConfig($url, $user, ***)"
}

object JdbcConfig {
  private[this] val jdbcRegex = "jdbc\\:([^\\:]+)\\:.*".r
  def extractDbmsName(jdbcUrl: String): Option[String] =
    jdbcUrl match {
      case jdbcRegex(n) => Some(n.toLowerCase)
      case _ => None
  def extractDbmsName(jdbcUrl: LenientUri): Option[String] =
    jdbcUrl.scheme.head match {
      case "jdbc" =>
        jdbcUrl.scheme.tail.headOption
      case _ =>
        None
    }
}
modules/store/src/main/scala/docspell/store/Store.scala (new file, 42 lines)
@@ -0,0 +1,42 @@
package docspell.store

import bitpeace.Bitpeace
import fs2._
import cats.effect.{Blocker, ContextShift, Effect, Resource}
import docspell.store.impl.StoreImpl
import doobie._
import doobie.hikari.HikariTransactor

import scala.concurrent.ExecutionContext

trait Store[F[_]] {

  def transact[A](prg: ConnectionIO[A]): F[A]

  def transact[A](prg: Stream[ConnectionIO, A]): Stream[F, A]

  def bitpeace: Bitpeace[F]

  def add(insert: ConnectionIO[Int], exists: ConnectionIO[Boolean]): F[AddResult]
}

object Store {

  def create[F[_]: Effect: ContextShift](jdbc: JdbcConfig
                                        , connectEC: ExecutionContext
                                        , blocker: Blocker): Resource[F, Store[F]] = {

    val hxa = HikariTransactor.newHikariTransactor[F](jdbc.driverClass
                                                     , jdbc.url.asString
                                                     , jdbc.user
                                                     , jdbc.password
                                                     , connectEC
                                                     , blocker)

    for {
      xa <- hxa
      st = new StoreImpl[F](jdbc, xa)
      _ <- Resource.liftF(st.migrate)
    } yield st
  }
}
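A minimal sketch (not part of the commit) of wiring a Store at application startup with cats-effect 2. The H2 url, pool size, and the use of doobie's ExecutionContexts helper are assumptions made for the example:

import cats.effect.{Blocker, ExitCode, IO, IOApp}
import docspell.common.LenientUri
import docspell.store.{JdbcConfig, Store}
import doobie.util.ExecutionContexts

object StoreWiringExample extends IOApp {
  // Hypothetical H2 connection settings, for illustration only.
  val jdbc = JdbcConfig(
    LenientUri.unsafe("jdbc:h2:./target/docspell-demo.db"),
    user = "sa",
    password = ""
  )

  def run(args: List[String]): IO[ExitCode] =
    (for {
      connectEC <- ExecutionContexts.fixedThreadPool[IO](4) // pool awaiting connections
      blocker   <- Blocker[IO]                              // pool for blocking JDBC calls
      store     <- Store.create[IO](jdbc, connectEC, blocker) // also runs migrations
    } yield store).use(_ => IO.pure(ExitCode.Success))
}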
@@ -0,0 +1,90 @@
package docspell.store.impl

import doobie._, doobie.implicits._
import docspell.store.impl.DoobieSyntax._

case class Column(name: String, ns: String = "", alias: String = "") {

  val f = {
    val col =
      if (ns.isEmpty) Fragment.const(name)
      else Fragment.const(ns + "." + name)
    if (alias.isEmpty) col
    else col ++ fr"as" ++ Fragment.const(alias)
  }

  def lowerLike[A: Put](value: A): Fragment =
    fr"lower(" ++ f ++ fr") LIKE $value"

  def like[A: Put](value: A): Fragment =
    f ++ fr"LIKE $value"

  def is[A: Put](value: A): Fragment =
    f ++ fr" = $value"

  def is[A: Put](ov: Option[A]): Fragment = ov match {
    case Some(v) => f ++ fr" = $v"
    case None => fr"is null"
  }

  def is(c: Column): Fragment =
    f ++ fr"=" ++ c.f

  def isNull: Fragment =
    f ++ fr"is null"

  def isNotNull: Fragment =
    f ++ fr"is not null"

  def isIn(values: Seq[Fragment]): Fragment =
    f ++ fr"IN (" ++ commas(values) ++ fr")"

  def isOrDiscard[A: Put](value: Option[A]): Fragment =
    value match {
      case Some(v) => is(v)
      case None => Fragment.empty
    }

  def isOneOf[A: Put](values: Seq[A]): Fragment = {
    val vals = values.map(v => sql"$v")
    isIn(vals)
  }

  def isNotOneOf[A: Put](values: Seq[A]): Fragment = {
    val vals = values.map(v => sql"$v")
    sql"(" ++ f ++ fr"is null or" ++ f ++ fr"not IN (" ++ commas(vals) ++ sql"))"
  }

  def isGt[A: Put](a: A): Fragment =
    f ++ fr"> $a"

  def isGt(c: Column): Fragment =
    f ++ fr">" ++ c.f

  def isLt[A: Put](a: A): Fragment =
    f ++ fr"< $a"

  def isLt(c: Column): Fragment =
    f ++ fr"<" ++ c.f

  def setTo[A: Put](value: A): Fragment =
    is(value)

  def setTo[A: Put](va: Option[A]): Fragment =
    f ++ fr" = $va"

  def ++(next: Fragment): Fragment =
    f.++(next)

  def prefix(ns: String): Column =
    Column(name, ns)

  def as(alias: String): Column =
    Column(name, ns, alias)

  def desc: Fragment =
    f ++ fr"desc"
  def asc: Fragment =
    f ++ fr"asc"

}
@@ -0,0 +1,90 @@
package docspell.store.impl

import java.time.format.DateTimeFormatter
import java.time.{Instant, LocalDate}

import docspell.common.Timestamp

import docspell.common._
import doobie._
import doobie.util.log.Success
import io.circe.{Decoder, Encoder}
import docspell.common.syntax.all._

trait DoobieMeta {

  implicit val sqlLogging = LogHandler({
    case e @ Success(_, _, _, _) =>
      DoobieMeta.logger.trace("SQL " + e)
    case e =>
      DoobieMeta.logger.error(s"SQL Failure: $e")
  })

  def jsonMeta[A](implicit d: Decoder[A], e: Encoder[A]): Meta[A] =
    Meta[String].imap(str => str.parseJsonAs[A].fold(ex => throw ex, identity))(a => e.apply(a).noSpaces)

  implicit val metaCollectiveState: Meta[CollectiveState] =
    Meta[String].imap(CollectiveState.unsafe)(CollectiveState.asString)

  implicit val metaUserState: Meta[UserState] =
    Meta[String].imap(UserState.unsafe)(UserState.asString)

  implicit val metaPassword: Meta[Password] =
    Meta[String].imap(Password(_))(_.pass)

  implicit val metaIdent: Meta[Ident] =
    Meta[String].imap(Ident.unsafe)(_.id)

  implicit val metaContactKind: Meta[ContactKind] =
    Meta[String].imap(ContactKind.unsafe)(_.asString)

  implicit val metaTimestamp: Meta[Timestamp] =
    Meta[Instant].imap(Timestamp(_))(_.value)

  implicit val metaJobState: Meta[JobState] =
    Meta[String].imap(JobState.unsafe)(_.name)

  implicit val metaDirection: Meta[Direction] =
    Meta[Boolean].imap(flag => if (flag) Direction.Incoming: Direction else Direction.Outgoing: Direction)(d => Direction.isIncoming(d))

  implicit val metaPriority: Meta[Priority] =
    Meta[Int].imap(Priority.fromInt)(Priority.toInt)

  implicit val metaLogLevel: Meta[LogLevel] =
    Meta[String].imap(LogLevel.unsafeString)(_.name)

  implicit val metaLenientUri: Meta[LenientUri] =
    Meta[String].imap(LenientUri.unsafe)(_.asString)

  implicit val metaNodeType: Meta[NodeType] =
    Meta[String].imap(NodeType.unsafe)(_.name)

  implicit val metaLocalDate: Meta[LocalDate] =
    Meta[String].imap(str => LocalDate.parse(str))(_.format(DateTimeFormatter.ISO_DATE))

  implicit val metaItemState: Meta[ItemState] =
    Meta[String].imap(ItemState.unsafe)(_.name)

  implicit val metNerTag: Meta[NerTag] =
    Meta[String].imap(NerTag.unsafe)(_.name)

  implicit val metaNerLabel: Meta[NerLabel] =
    jsonMeta[NerLabel]

  implicit val metaNerLabelList: Meta[List[NerLabel]] =
    jsonMeta[List[NerLabel]]

  implicit val metaItemProposal: Meta[MetaProposal] =
    jsonMeta[MetaProposal]

  implicit val metaItemProposalList: Meta[MetaProposalList] =
    jsonMeta[MetaProposalList]

  implicit val metaLanguage: Meta[Language] =
    Meta[String].imap(Language.unsafe)(_.iso3)
}

object DoobieMeta extends DoobieMeta {
  import org.log4s._
  private val logger = getLogger
}
@@ -0,0 +1,91 @@
package docspell.store.impl

import docspell.common.Timestamp
import doobie._
import doobie.implicits._

trait DoobieSyntax {

  def coalesce(f0: Fragment, fs: Fragment*): Fragment =
    sql" coalesce(" ++ commas(f0 :: fs.toList) ++ sql") "

  def power2(c: Column): Fragment =
    sql"power(2," ++ c.f ++ sql")"

  def commas(fs: Seq[Fragment]): Fragment =
    fs.reduce(_ ++ Fragment.const(",") ++ _)

  def commas(fa: Fragment, fas: Fragment*): Fragment =
    commas(fa :: fas.toList)

  def and(fs: Seq[Fragment]): Fragment =
    Fragment.const(" (") ++ fs.filter(f => !isEmpty(f)).reduce(_ ++ Fragment.const(" AND ") ++ _) ++ Fragment.const(") ")

  def and(f0: Fragment, fs: Fragment*): Fragment =
    and(f0 :: fs.toList)

  def or(fs: Seq[Fragment]): Fragment =
    Fragment.const(" (") ++ fs.reduce(_ ++ Fragment.const(" OR ") ++ _) ++ Fragment.const(") ")
  def or(f0: Fragment, fs: Fragment*): Fragment =
    or(f0 :: fs.toList)

  def where(fa: Fragment): Fragment =
    if (isEmpty(fa)) Fragment.empty
    else Fragment.const(" WHERE ") ++ fa

  def orderBy(fa: Fragment): Fragment =
    Fragment.const(" ORDER BY ") ++ fa

  def orderBy(c0: Fragment, cs: Fragment*): Fragment =
    fr"ORDER BY" ++ commas(c0 :: cs.toList)

  def updateRow(table: Fragment, where: Fragment, setter: Fragment): Fragment =
    Fragment.const("UPDATE ") ++ table ++ Fragment.const(" SET ") ++ setter ++ this.where(where)

  def insertRow(table: Fragment, cols: List[Column], vals: Fragment): Fragment =
    Fragment.const("INSERT INTO ") ++ table ++ Fragment.const(" (") ++
      commas(cols.map(_.f)) ++ Fragment.const(") VALUES (") ++ vals ++ Fragment.const(")")

  def insertRows(table: Fragment, cols: List[Column], vals: List[Fragment]): Fragment =
    Fragment.const("INSERT INTO ") ++ table ++ Fragment.const(" (") ++
      commas(cols.map(_.f)) ++ Fragment.const(") VALUES ") ++ commas(vals.map(f => sql"(" ++ f ++ sql")"))


  def selectSimple(cols: Seq[Column], table: Fragment, where: Fragment): Fragment =
    selectSimple(commas(cols.map(_.f)), table, where)

  def selectSimple(cols: Fragment, table: Fragment, where: Fragment): Fragment =
    Fragment.const("SELECT ") ++ cols ++
      Fragment.const(" FROM ") ++ table ++ this.where(where)

  def selectDistinct(cols: Seq[Column], table: Fragment, where: Fragment): Fragment =
    Fragment.const("SELECT DISTINCT(") ++ commas(cols.map(_.f)) ++
      Fragment.const(") FROM ") ++ table ++ this.where(where)


  // def selectJoinCollective(cols: Seq[Column], fkCid: Column, table: Fragment, wh: Fragment): Fragment =
  //   selectSimple(cols.map(_.prefix("a"))
  //     , table ++ fr"a," ++ RCollective.table ++ fr"b"
  //     , if (isEmpty(wh)) fkCid.prefix("a") is RCollective.Columns.id.prefix("b")
  //       else and(wh, fkCid.prefix("a") is RCollective.Columns.id.prefix("b")))

  def selectCount(col: Column, table: Fragment, where: Fragment): Fragment =
    Fragment.const("SELECT COUNT(") ++ col.f ++ Fragment.const(") FROM ") ++ table ++ this.where(where)

  def deleteFrom(table: Fragment, where: Fragment): Fragment = {
    fr"DELETE FROM" ++ table ++ this.where(where)
  }

  def withCTE(ps: (String, Fragment)*): Fragment = {
    val subsel: Seq[Fragment] = ps.map(p => Fragment.const(p._1) ++ fr"AS (" ++ p._2 ++ fr")")
    fr"WITH" ++ commas(subsel)
  }

  def isEmpty(fragment: Fragment): Boolean =
    Fragment.empty.toString() == fragment.toString()

  def currentTime: ConnectionIO[Timestamp] =
    Timestamp.current[ConnectionIO]
}

object DoobieSyntax extends DoobieSyntax
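A minimal sketch (not part of the commit) of how Column and the DoobieSyntax helpers compose into a SQL fragment; the table and column names here are invented purely to show the composition:

import docspell.store.impl.Column
import docspell.store.impl.Implicits._
import doobie._
import doobie.implicits._

object FragmentCompositionExample {
  // Hypothetical table and columns, only to illustrate fragment building.
  val table = fr"widget"
  val id    = Column("id")
  val cid   = Column("cid")
  val name  = Column("name")

  // Roughly: SELECT w.id, w.name FROM widget w WHERE (w.cid = ? AND lower(w.name) LIKE ?)
  def findByName(coll: String, pattern: String): Fragment =
    selectSimple(
      List(id.prefix("w"), name.prefix("w")),
      table ++ fr"w",
      and(cid.prefix("w") is coll, name.prefix("w").lowerLike(pattern.toLowerCase))
    )
}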
@@ -0,0 +1,5 @@
package docspell.store.impl


object Implicits extends DoobieMeta
  with DoobieSyntax
@@ -0,0 +1,37 @@
package docspell.store.impl

import bitpeace.{Bitpeace, BitpeaceConfig, TikaMimetypeDetect}
import cats.effect.Effect
import cats.implicits._
import docspell.common.Ident
import docspell.store.migrate.FlywayMigrate
import docspell.store.{AddResult, JdbcConfig, Store}
import doobie._
import doobie.implicits._

final class StoreImpl[F[_]: Effect](jdbc: JdbcConfig, xa: Transactor[F]) extends Store[F] {
  val bitpeaceCfg = BitpeaceConfig("filemeta", "filechunk", TikaMimetypeDetect, Ident.randomId[F].map(_.id))

  def migrate: F[Int] =
    FlywayMigrate.run[F](jdbc)

  def transact[A](prg: doobie.ConnectionIO[A]): F[A] =
    prg.transact(xa)

  def transact[A](prg: fs2.Stream[doobie.ConnectionIO, A]): fs2.Stream[F, A] =
    prg.transact(xa)

  def bitpeace: Bitpeace[F] =
    Bitpeace(bitpeaceCfg, xa)

  def add(insert: ConnectionIO[Int], exists: ConnectionIO[Boolean]): F[AddResult] = {
    for {
      save  <- transact(insert).attempt
      exist <- save.swap.traverse(ex => transact(exists).map(b => (ex, b)))
    } yield exist.swap match {
      case Right(_) => AddResult.Success
      case Left((_, true)) => AddResult.EntityExists("Adding failed, because the entity already exists.")
      case Left((ex, _)) => AddResult.Failure(ex)
    }
  }
}
@@ -0,0 +1,32 @@
package docspell.store.migrate

import cats.effect.Sync
import docspell.store.JdbcConfig
import org.flywaydb.core.Flyway
import org.log4s._

object FlywayMigrate {
  private[this] val logger = getLogger

  def run[F[_]: Sync](jdbc: JdbcConfig): F[Int] = Sync[F].delay {
    logger.info("Running db migrations...")
    val locations = jdbc.dbmsName match {
      case Some(dbtype) =>
        val name = if (dbtype == "h2") "postgresql" else dbtype
        List("classpath:db/migration/common", s"classpath:db/migration/${name}")
      case None =>
        logger.warn(s"Cannot read database name from jdbc url: ${jdbc.url}. Go with H2")
        List("classpath:db/migration/common", "classpath:db/h2")
    }

    logger.info(s"Using migration locations: $locations")
    val fw = Flyway.configure().
      cleanDisabled(true).
      dataSource(jdbc.url.asString, jdbc.user, jdbc.password).
      locations(locations: _*).
      load()

    fw.repair()
    fw.migrate()
  }
}
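For orientation (not part of the commit), a standalone restatement of the location selection above; the notable case is that an H2 database reuses the PostgreSQL migration scripts. The dbms names passed in are examples only:

object MigrationLocationExample extends App {
  // Mirrors the location selection in FlywayMigrate.run; inputs are invented.
  def locations(dbmsName: Option[String]): List[String] =
    dbmsName match {
      case Some(dbtype) =>
        val name = if (dbtype == "h2") "postgresql" else dbtype
        List("classpath:db/migration/common", s"classpath:db/migration/$name")
      case None =>
        List("classpath:db/migration/common", "classpath:db/h2")
    }

  assert(locations(Some("postgresql")) ==
    List("classpath:db/migration/common", "classpath:db/migration/postgresql"))
  // H2 is mapped onto the PostgreSQL scripts:
  assert(locations(Some("h2")) ==
    List("classpath:db/migration/common", "classpath:db/migration/postgresql"))
}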
modules/store/src/main/scala/docspell/store/ops/ONode.scala (new file, 36 lines)
@@ -0,0 +1,36 @@
package docspell.store.ops

import cats.effect.{Effect, Resource}
import cats.implicits._
import docspell.common.syntax.all._
import docspell.common.{Ident, LenientUri, NodeType}
import docspell.store.Store
import docspell.store.records.RNode
import org.log4s._

trait ONode[F[_]] {

  def register(appId: Ident, nodeType: NodeType, uri: LenientUri): F[Unit]

  def unregister(appId: Ident): F[Unit]
}

object ONode {
  private[this] val logger = getLogger

  def apply[F[_] : Effect](store: Store[F]): Resource[F, ONode[F]] =
    Resource.pure(new ONode[F] {

      def register(appId: Ident, nodeType: NodeType, uri: LenientUri): F[Unit] =
        for {
          node <- RNode(appId, nodeType, uri)
          _ <- logger.finfo(s"Registering node $node")
          _ <- store.transact(RNode.set(node))
        } yield ()

      def unregister(appId: Ident): F[Unit] =
        logger.finfo(s"Unregister app ${appId.id}") *>
          store.transact(RNode.delete(appId)).map(_ => ())
    })

}
@@ -0,0 +1,70 @@
package docspell.store.queries

import fs2.Stream
import cats.implicits._
import cats.effect.Sync
import doobie._
import doobie.implicits._
import docspell.common.{Ident, MetaProposalList}
import docspell.store.Store
import docspell.store.impl.Implicits._
import docspell.store.records.{RAttachment, RAttachmentMeta, RItem}

object QAttachment {

  def deleteById[F[_]: Sync](store: Store[F])(attachId: Ident, coll: Ident): F[Int] = {
    for {
      raOpt <- store.transact(RAttachment.findByIdAndCollective(attachId, coll))
      n <- raOpt.traverse(_ => store.transact(RAttachment.delete(attachId)))
      f <- Stream.emit(raOpt).
        unNoneTerminate.
        map(_.fileId.id).
        flatMap(store.bitpeace.delete).
        compile.last
    } yield n.getOrElse(0) + f.map(_ => 1).getOrElse(0)
  }

  def deleteAttachment[F[_]: Sync](store: Store[F])(ra: RAttachment): F[Int] = {
    for {
      n <- store.transact(RAttachment.delete(ra.id))
      f <- Stream.emit(ra.fileId.id).
        flatMap(store.bitpeace.delete).
        compile.last
    } yield n + f.map(_ => 1).getOrElse(0)
  }

  def deleteItemAttachments[F[_]: Sync](store: Store[F])(itemId: Ident, coll: Ident): F[Int] = {
    for {
      ras <- store.transact(RAttachment.findByItemAndCollective(itemId, coll))
      ns <- ras.traverse(deleteAttachment[F](store))
    } yield ns.sum
  }

  def getMetaProposals(itemId: Ident, coll: Ident): ConnectionIO[MetaProposalList] = {
    val AC = RAttachment.Columns
    val MC = RAttachmentMeta.Columns
    val IC = RItem.Columns

    val q = fr"SELECT" ++ MC.proposals.prefix("m").f ++ fr"FROM" ++ RAttachmentMeta.table ++ fr"m" ++
      fr"INNER JOIN" ++ RAttachment.table ++ fr"a ON" ++ AC.id.prefix("a").is(MC.id.prefix("m")) ++
      fr"INNER JOIN" ++ RItem.table ++ fr"i ON" ++ AC.itemId.prefix("a").is(IC.id.prefix("i")) ++
      fr"WHERE" ++ and(AC.itemId.prefix("a").is(itemId), IC.cid.prefix("i").is(coll))

    for {
      ml <- q.query[MetaProposalList].to[Vector]
    } yield MetaProposalList.flatten(ml)
  }

  def getAttachmentMeta(attachId: Ident, collective: Ident): ConnectionIO[Option[RAttachmentMeta]] = {
    val AC = RAttachment.Columns
    val MC = RAttachmentMeta.Columns
    val IC = RItem.Columns

    val q = fr"SELECT" ++ commas(MC.all.map(_.prefix("m").f)) ++ fr"FROM" ++ RItem.table ++ fr"i" ++
      fr"INNER JOIN" ++ RAttachment.table ++ fr"a ON" ++ IC.id.prefix("i").is(AC.itemId.prefix("a")) ++
      fr"INNER JOIN" ++ RAttachmentMeta.table ++ fr"m ON" ++ AC.id.prefix("a").is(MC.id.prefix("m")) ++
      fr"WHERE" ++ and(AC.id.prefix("a") is attachId, IC.cid.prefix("i") is collective)

    q.query[RAttachmentMeta].option
  }
}
@@ -0,0 +1,45 @@
package docspell.store.queries

import doobie._
import doobie.implicits._
import docspell.common.{Direction, Ident}
import docspell.store.impl.Implicits._
import docspell.store.records.{RAttachment, RItem, RTag, RTagItem}

object QCollective {

  case class InsightData( incoming: Int
                        , outgoing: Int
                        , bytes: Long
                        , tags: Map[String, Int])

  def getInsights(coll: Ident): ConnectionIO[InsightData] = {
    val IC = RItem.Columns
    val AC = RAttachment.Columns
    val TC = RTag.Columns
    val RC = RTagItem.Columns
    val q0 = selectCount(IC.id, RItem.table, and(IC.cid is coll, IC.incoming is Direction.incoming)).
      query[Int].unique
    val q1 = selectCount(IC.id, RItem.table, and(IC.cid is coll, IC.incoming is Direction.outgoing)).
      query[Int].unique

    val q2 = fr"SELECT sum(m.length) FROM" ++ RItem.table ++ fr"i" ++
      fr"INNER JOIN" ++ RAttachment.table ++ fr"a ON" ++ AC.itemId.prefix("a").is(IC.id.prefix("i")) ++
      fr"INNER JOIN filemeta m ON m.id =" ++ AC.fileId.prefix("a").f ++
      fr"WHERE" ++ IC.cid.is(coll)

    val q3 = fr"SELECT" ++ commas(TC.name.prefix("t").f, fr"count(" ++ RC.itemId.prefix("r").f ++ fr")") ++
      fr"FROM" ++ RTagItem.table ++ fr"r" ++
      fr"INNER JOIN" ++ RTag.table ++ fr"t ON" ++ RC.tagId.prefix("r").is(TC.tid.prefix("t")) ++
      fr"WHERE" ++ TC.cid.prefix("t").is(coll) ++
      fr"GROUP BY" ++ TC.name.prefix("t").f

    for {
      n0 <- q0
      n1 <- q1
      n2 <- q2.query[Option[Long]].unique
      n3 <- q3.query[(String, Int)].to[Vector]
    } yield InsightData(n0, n1, n2.getOrElse(0), Map.from(n3))
  }

}
modules/store/src/main/scala/docspell/store/queries/QItem.scala (new file, 204 lines)
@@ -0,0 +1,204 @@
package docspell.store.queries

import bitpeace.FileMeta
import cats.implicits._
import cats.effect.Sync
import fs2.Stream
import doobie._
import doobie.implicits._
import docspell.common.{IdRef, _}
import docspell.store.Store
import docspell.store.records._
import docspell.store.impl.Implicits._
import org.log4s._

object QItem {
  private [this] val logger = getLogger

  case class ItemData( item: RItem
                     , corrOrg: Option[ROrganization]
                     , corrPerson: Option[RPerson]
                     , concPerson: Option[RPerson]
                     , concEquip: Option[REquipment]
                     , inReplyTo: Option[IdRef]
                     , tags: Vector[RTag]
                     , attachments: Vector[(RAttachment, FileMeta)]) {

    def filterCollective(coll: Ident): Option[ItemData] =
      if (item.cid == coll) Some(this) else None
  }

  def findItem(id: Ident): ConnectionIO[Option[ItemData]] = {
    val IC = RItem.Columns.all.map(_.prefix("i"))
    val OC = ROrganization.Columns.all.map(_.prefix("o"))
    val P0C = RPerson.Columns.all.map(_.prefix("p0"))
    val P1C = RPerson.Columns.all.map(_.prefix("p1"))
    val EC = REquipment.Columns.all.map(_.prefix("e"))
    val ICC = List(RItem.Columns.id, RItem.Columns.name).map(_.prefix("ref"))

    val cq = selectSimple(IC ++ OC ++ P0C ++ P1C ++ EC ++ ICC, RItem.table ++ fr"i", Fragment.empty) ++
      fr"LEFT JOIN" ++ ROrganization.table ++ fr"o ON" ++ RItem.Columns.corrOrg.prefix("i").is(ROrganization.Columns.oid.prefix("o")) ++
      fr"LEFT JOIN" ++ RPerson.table ++ fr"p0 ON" ++ RItem.Columns.corrPerson.prefix("i").is(RPerson.Columns.pid.prefix("p0")) ++
      fr"LEFT JOIN" ++ RPerson.table ++ fr"p1 ON" ++ RItem.Columns.concPerson.prefix("i").is(RPerson.Columns.pid.prefix("p1")) ++
      fr"LEFT JOIN" ++ REquipment.table ++ fr"e ON" ++ RItem.Columns.concEquipment.prefix("i").is(REquipment.Columns.eid.prefix("e")) ++
      fr"LEFT JOIN" ++ RItem.table ++ fr"ref ON" ++ RItem.Columns.inReplyTo.prefix("i").is(RItem.Columns.id.prefix("ref")) ++
      fr"WHERE" ++ RItem.Columns.id.prefix("i").is(id)

    val q = cq.query[(RItem, Option[ROrganization], Option[RPerson], Option[RPerson], Option[REquipment], Option[IdRef])].option
    val attachs = RAttachment.findByItemWithMeta(id)

    val tags = RTag.findByItem(id)

    for {
      data <- q
      att <- attachs
      ts <- tags
    } yield data.map(d => ItemData(d._1, d._2, d._3, d._4, d._5, d._6, ts, att))
  }


  case class ListItem( id: Ident
                     , name: String
                     , state: ItemState
                     , date: Timestamp
                     , dueDate: Option[Timestamp]
                     , source: String
                     , direction: Direction
                     , created: Timestamp
                     , fileCount: Int
                     , corrOrg: Option[IdRef]
                     , corrPerson: Option[IdRef]
                     , concPerson: Option[IdRef]
                     , concEquip: Option[IdRef])

  case class Query( collective: Ident
                  , name: Option[String]
                  , states: Seq[ItemState]
                  , direction: Option[Direction]
                  , corrPerson: Option[Ident]
                  , corrOrg: Option[Ident]
                  , concPerson: Option[Ident]
                  , concEquip: Option[Ident]
                  , tagsInclude: List[Ident]
                  , tagsExclude: List[Ident]
                  , dateFrom: Option[Timestamp]
                  , dateTo: Option[Timestamp]
                  , dueDateFrom: Option[Timestamp]
                  , dueDateTo: Option[Timestamp])

  def findItems(q: Query): Stream[ConnectionIO, ListItem] = {
    val IC = RItem.Columns
    val AC = RAttachment.Columns
    val PC = RPerson.Columns
    val OC = ROrganization.Columns
    val EC = REquipment.Columns
    val itemCols = IC.all
    val personCols = List(RPerson.Columns.pid, RPerson.Columns.name)
    val orgCols = List(ROrganization.Columns.oid, ROrganization.Columns.name)
    val equipCols = List(REquipment.Columns.eid, REquipment.Columns.name)

    val finalCols = commas(IC.id.prefix("i").f
      , IC.name.prefix("i").f
      , IC.state.prefix("i").f
      , coalesce(IC.itemDate.prefix("i").f, IC.created.prefix("i").f)
      , IC.dueDate.prefix("i").f
      , IC.source.prefix("i").f
      , IC.incoming.prefix("i").f
      , IC.created.prefix("i").f
      , fr"COALESCE(a.num, 0)"
      , OC.oid.prefix("o0").f
      , OC.name.prefix("o0").f
      , PC.pid.prefix("p0").f
      , PC.name.prefix("p0").f
      , PC.pid.prefix("p1").f
      , PC.name.prefix("p1").f
      , EC.eid.prefix("e1").f
      , EC.name.prefix("e1").f
      )

    val withItem = selectSimple(itemCols, RItem.table, IC.cid is q.collective)
    val withPerson = selectSimple(personCols, RPerson.table, PC.cid is q.collective)
    val withOrgs = selectSimple(orgCols, ROrganization.table, OC.cid is q.collective)
    val withEquips = selectSimple(equipCols, REquipment.table, EC.cid is q.collective)
    val withAttach = fr"SELECT COUNT(" ++ AC.id.f ++ fr") as num, " ++ AC.itemId.f ++
      fr"from" ++ RAttachment.table ++ fr"GROUP BY (" ++ AC.itemId.f ++ fr")"

    val query = withCTE("items" -> withItem
      , "persons" -> withPerson
      , "orgs" -> withOrgs
      , "equips" -> withEquips
      , "attachs" -> withAttach) ++
      fr"SELECT DISTINCT" ++ finalCols ++ fr" FROM items i" ++
      fr"LEFT JOIN attachs a ON" ++ IC.id.prefix("i").is(AC.itemId.prefix("a")) ++
      fr"LEFT JOIN persons p0 ON" ++ IC.corrPerson.prefix("i").is(PC.pid.prefix("p0")) ++ // i.corrperson = p0.pid
      fr"LEFT JOIN orgs o0 ON" ++ IC.corrOrg.prefix("i").is(OC.oid.prefix("o0")) ++ // i.corrorg = o0.oid
      fr"LEFT JOIN persons p1 ON" ++ IC.concPerson.prefix("i").is(PC.pid.prefix("p1")) ++ // i.concperson = p1.pid
      fr"LEFT JOIN equips e1 ON" ++ IC.concEquipment.prefix("i").is(EC.eid.prefix("e1")) // i.concequipment = e1.eid

    // inclusive tags are AND-ed
    val tagSelectsIncl = q.tagsInclude.map(tid =>
      selectSimple(List(RTagItem.Columns.itemId), RTagItem.table, RTagItem.Columns.tagId is tid)).
      map(f => sql"(" ++ f ++ sql") ")

    // exclusive tags are OR-ed
    val tagSelectsExcl =
      if (q.tagsExclude.isEmpty) Fragment.empty
      else selectSimple(List(RTagItem.Columns.itemId), RTagItem.table, RTagItem.Columns.tagId isOneOf q.tagsExclude)

    val name = q.name.map(queryWildcard)
    val cond = and(
      IC.cid.prefix("i") is q.collective,
      IC.state.prefix("i") isOneOf q.states,
      IC.incoming.prefix("i") isOrDiscard q.direction,
      name.map(n => IC.name.prefix("i").lowerLike(n)).getOrElse(Fragment.empty),
      RPerson.Columns.pid.prefix("p0") isOrDiscard q.corrPerson,
      ROrganization.Columns.oid.prefix("o0") isOrDiscard q.corrOrg,
      RPerson.Columns.pid.prefix("p1") isOrDiscard q.concPerson,
      REquipment.Columns.eid.prefix("e1") isOrDiscard q.concEquip,
      if (q.tagsInclude.isEmpty) Fragment.empty
      else IC.id.prefix("i") ++ sql" IN (" ++ tagSelectsIncl.reduce(_ ++ fr"INTERSECT" ++ _) ++ sql")",
      if (q.tagsExclude.isEmpty) Fragment.empty
      else IC.id.prefix("i").f ++ sql" NOT IN (" ++ tagSelectsExcl ++ sql")",
      q.dateFrom.map(d => coalesce(IC.itemDate.prefix("i").f, IC.created.prefix("i").f) ++ fr">= $d").getOrElse(Fragment.empty),
      q.dateTo.map(d => coalesce(IC.itemDate.prefix("i").f, IC.created.prefix("i").f) ++ fr"<= $d").getOrElse(Fragment.empty),
      q.dueDateFrom.map(d => IC.dueDate.prefix("i").isGt(d)).getOrElse(Fragment.empty),
      q.dueDateTo.map(d => IC.dueDate.prefix("i").isLt(d)).getOrElse(Fragment.empty)
    )

    val order = orderBy(coalesce(IC.itemDate.prefix("i").f, IC.created.prefix("i").f) ++ fr"DESC")
    val frag = query ++ fr"WHERE" ++ cond ++ order
    logger.trace(s"List items: $frag")
    frag.query[ListItem].stream
  }


  def delete[F[_]: Sync](store: Store[F])(itemId: Ident, collective: Ident): F[Int] =
    for {
      tn <- store.transact(RTagItem.deleteItemTags(itemId))
      rn <- QAttachment.deleteItemAttachments(store)(itemId, collective)
      n <- store.transact(RItem.deleteByIdAndCollective(itemId, collective))
    } yield tn + rn + n

  def findByFileIds(fileMetaIds: List[Ident]): ConnectionIO[Vector[RItem]] = {
    val IC = RItem.Columns
    val AC = RAttachment.Columns
    val q = fr"SELECT DISTINCT" ++ commas(IC.all.map(_.prefix("i").f)) ++ fr"FROM" ++ RItem.table ++ fr"i" ++
      fr"INNER JOIN" ++ RAttachment.table ++ fr"a ON" ++ AC.itemId.prefix("a").is(IC.id.prefix("i")) ++
      fr"WHERE" ++ AC.fileId.isOneOf(fileMetaIds) ++ orderBy(IC.created.prefix("i").asc)

    q.query[RItem].to[Vector]
  }

  private def queryWildcard(value: String): String = {
    def prefix(n: String) =
      if (n.startsWith("*")) s"%${n.substring(1)}"
      else n

    def suffix(n: String) =
      if (n.endsWith("*")) s"${n.dropRight(1)}%"
      else n

    prefix(suffix(value))
  }

}
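As a small illustration (not part of the commit), queryWildcard only translates a leading or trailing `*` into the SQL `%` wildcard used by lowerLike; the search values below are invented. A standalone restatement of the private helper:

object WildcardExample extends App {
  // Mirrors the private queryWildcard logic above, for demonstration only.
  def toSqlPattern(value: String): String = {
    val withSuffix = if (value.endsWith("*")) value.dropRight(1) + "%" else value
    if (withSuffix.startsWith("*")) "%" + withSuffix.substring(1) else withSuffix
  }

  assert(toSqlPattern("*invoice*") == "%invoice%") // substring match
  assert(toSqlPattern("acme*") == "acme%")         // prefix match
  assert(toSqlPattern("2019") == "2019")           // exact value, no wildcard added
}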
modules/store/src/main/scala/docspell/store/queries/QJob.scala (new file, 184 lines)
@@ -0,0 +1,184 @@
package docspell.store.queries

import cats.effect.Effect
import cats.implicits._
import docspell.common._
import docspell.common.syntax.all._
import docspell.store.Store
import docspell.store.impl.Implicits._
import docspell.store.records.{RJob, RJobGroupUse, RJobLog}
import doobie._
import doobie.implicits._
import fs2.Stream
import org.log4s._

object QJob {
  private [this] val logger = getLogger

  def takeNextJob[F[_]: Effect](store: Store[F])(priority: Ident => F[Priority], worker: Ident, retryPause: Duration): F[Option[RJob]] = {
    Stream.range(0, 10).
      evalMap(n => takeNextJob1(store)(priority, worker, retryPause, n)).
      evalTap({ x =>
        if (x.isLeft) logger.fdebug[F]("Cannot mark job, probably due to concurrent updates. Will retry.")
        else ().pure[F]
      }).
      find(_.isRight).
      flatMap({
        case Right(job) =>
          Stream.emit(job)
        case Left(_) =>
          Stream.eval(logger.fwarn[F]("Cannot mark job, even after retrying. Give up.")).map(_ => None)
      }).
      compile.last.map(_.flatten)
  }

  private def takeNextJob1[F[_]: Effect](store: Store[F])( priority: Ident => F[Priority]
                                                         , worker: Ident
                                                         , retryPause: Duration
                                                         , currentTry: Int): F[Either[Unit, Option[RJob]]] = {
    //if this fails, we have to restart takeNextJob
    def markJob(job: RJob): F[Either[Unit, RJob]] =
      store.transact(for {
        n <- RJob.setScheduled(job.id, worker)
        _ <- if (n == 1) RJobGroupUse.setGroup(RJobGroupUse(worker, job.group))
             else 0.pure[ConnectionIO]
      } yield if (n == 1) Right(job) else Left(()))

    for {
      _     <- logger.ftrace[F](s"About to take next job (worker ${worker.id}), try $currentTry")
      now   <- Timestamp.current[F]
      group <- store.transact(selectNextGroup(worker, now, retryPause))
      _     <- logger.ftrace[F](s"Choose group ${group.map(_.id)}")
      prio  <- group.map(priority).getOrElse((Priority.Low: Priority).pure[F])
      _     <- logger.ftrace[F](s"Looking for job of prio $prio")
      job   <- group.map(g => store.transact(selectNextJob(g, prio, retryPause, now))).getOrElse((None: Option[RJob]).pure[F])
      _     <- logger.ftrace[F](s"Found job: ${job.map(_.info)}")
      res   <- job.traverse(j => markJob(j))
    } yield res.map(_.map(_.some)).getOrElse {
      if (group.isDefined) Left(()) // if a group was found, but no job someone else was faster
      else Right(None)
    }
  }

  def selectNextGroup(worker: Ident, now: Timestamp, initialPause: Duration): ConnectionIO[Option[Ident]] = {
    val JC = RJob.Columns
    val waiting: JobState = JobState.Waiting
    val stuck: JobState = JobState.Stuck
    val jgroup = JC.group.prefix("a")
    val jstate = JC.state.prefix("a")
    val ugroup = RJobGroupUse.Columns.group.prefix("b")
    val uworker = RJobGroupUse.Columns.worker.prefix("b")

    val stuckTrigger = coalesce(JC.startedmillis.prefix("a").f, sql"${now.toMillis}") ++
      fr"+" ++ power2(JC.retries.prefix("a")) ++ fr"* ${initialPause.millis}"

    val stateCond = or(jstate is waiting, and(jstate is stuck, stuckTrigger ++ fr"< ${now.toMillis}"))

    val sql1 = fr"SELECT" ++ jgroup.f ++ fr"as g FROM" ++ RJob.table ++ fr"a" ++
      fr"INNER JOIN" ++ RJobGroupUse.table ++ fr"b ON" ++ jgroup.isGt(ugroup) ++
      fr"WHERE" ++ and(uworker is worker, stateCond) ++
      fr"LIMIT 1" //LIMIT is not sql standard, but supported by h2,mariadb and postgres
    val sql2 = fr"SELECT min(" ++ jgroup.f ++ fr") as g FROM" ++ RJob.table ++ fr"a" ++
      fr"WHERE" ++ stateCond

    val union = sql"SELECT g FROM ((" ++ sql1 ++ sql") UNION ALL (" ++ sql2 ++ sql")) as t0 WHERE g is not null"

    union.query[Ident].to[List].map(_.headOption) // either one or two results, but may be empty if RJob table is empty
  }

  def selectNextJob(group: Ident, prio: Priority, initialPause: Duration, now: Timestamp): ConnectionIO[Option[RJob]] = {
    val JC = RJob.Columns
    val psort =
      if (prio == Priority.High) JC.priority.desc
      else JC.priority.asc
    val waiting: JobState = JobState.Waiting
    val stuck: JobState = JobState.Stuck

    val stuckTrigger = coalesce(JC.startedmillis.f, sql"${now.toMillis}") ++ fr"+" ++ power2(JC.retries) ++ fr"* ${initialPause.millis}"
    val sql = selectSimple(JC.all, RJob.table,
      and(JC.group is group, or(JC.state is waiting, and(JC.state is stuck, stuckTrigger ++ fr"< ${now.toMillis}")))) ++
      orderBy(JC.state.asc, psort, JC.submitted.asc) ++
      fr"LIMIT 1"

    sql.query[RJob].option
  }

  def setCancelled[F[_]: Effect](id: Ident, store: Store[F]): F[Unit] =
    for {
      now <- Timestamp.current[F]
      _ <- store.transact(RJob.setCancelled(id, now))
    } yield ()

  def setFailed[F[_]: Effect](id: Ident, store: Store[F]): F[Unit] =
    for {
      now <- Timestamp.current[F]
      _ <- store.transact(RJob.setFailed(id, now))
    } yield ()

  def setSuccess[F[_]: Effect](id: Ident, store: Store[F]): F[Unit] =
    for {
      now <- Timestamp.current[F]
      _ <- store.transact(RJob.setSuccess(id, now))
    } yield ()

  def setStuck[F[_]: Effect](id: Ident, store: Store[F]): F[Unit] =
    for {
      now <- Timestamp.current[F]
      _ <- store.transact(RJob.setStuck(id, now))
    } yield ()

  def setRunning[F[_]: Effect](id: Ident, workerId: Ident, store: Store[F]): F[Unit] =
    for {
      now <- Timestamp.current[F]
      _ <- store.transact(RJob.setRunning(id, workerId, now))
    } yield ()

  def setFinalState[F[_]: Effect](id: Ident, state: JobState, store: Store[F]): F[Unit] =
    state match {
      case JobState.Success =>
        setSuccess(id, store)
      case JobState.Failed =>
        setFailed(id, store)
      case JobState.Cancelled =>
        setCancelled(id, store)
      case JobState.Stuck =>
        setStuck(id, store)
      case _ =>
        logger.ferror[F](s"Invalid final state: $state.")
    }

  def exceedsRetries[F[_]: Effect](id: Ident, max: Int, store: Store[F]): F[Boolean] =
    store.transact(RJob.getRetries(id)).map(n => n.forall(_ >= max))

  def runningToWaiting[F[_]: Effect](workerId: Ident, store: Store[F]): F[Unit] = {
    store.transact(RJob.setRunningToWaiting(workerId)).map(_ => ())
  }

  def findAll[F[_]: Effect](ids: Seq[Ident], store: Store[F]): F[Vector[RJob]] =
    store.transact(RJob.findFromIds(ids))

  def queueStateSnapshot(collective: Ident): Stream[ConnectionIO, (RJob, Vector[RJobLog])] = {
    val JC = RJob.Columns
    val waiting: Set[JobState] = Set(JobState.Waiting, JobState.Stuck, JobState.Scheduled)
    val running: Set[JobState] = Set(JobState.Running)
    val done = JobState.all.diff(waiting).diff(running)

    def selectJobs(now: Timestamp): Stream[ConnectionIO, RJob] = {
      val refDate = now.minusHours(24)
      val sql = selectSimple(JC.all, RJob.table,
        and(JC.group is collective,
          or(and(JC.state.isOneOf(done.toSeq), JC.submitted isGt refDate)
            , JC.state.isOneOf((running ++ waiting).toSeq))))
      (sql ++ orderBy(JC.submitted.desc)).query[RJob].stream
    }

    def selectLogs(job: RJob): ConnectionIO[Vector[RJobLog]] =
      RJobLog.findLogs(job.id)

    for {
      now <- Stream.eval(Timestamp.current[ConnectionIO])
      job <- selectJobs(now)
      res <- Stream.eval(selectLogs(job))
    } yield (job, res)
  }
}
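The stuck-job condition built in selectNextGroup and selectNextJob amounts to an exponential backoff: a stuck job becomes eligible again once startedmillis + 2^retries * retryPause lies in the past. A minimal standalone sketch of that check (not part of the commit), with invented numbers:

object BackoffExample extends App {
  // Hypothetical values: a job that started at some point and already failed twice.
  val startedMillis = 1000000L // corresponds to the startedmillis column
  val retries       = 2        // corresponds to the retries column
  val retryPauseMs  = 60000L   // the retryPause duration, one minute

  // Same formula as the SQL fragment: coalesce(startedmillis, now) + power(2, retries) * retryPause
  def eligibleAgain(nowMillis: Long): Boolean =
    startedMillis + math.pow(2, retries).toLong * retryPauseMs < nowMillis

  assert(!eligibleAgain(startedMillis + 3 * 60000L)) // still inside the 4-minute pause
  assert(eligibleAgain(startedMillis + 4 * 60000L + 1)) // pause of 2^2 minutes has elapsed
}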
@@ -0,0 +1,36 @@
package docspell.store.queries

import docspell.common._
import docspell.store.impl.Implicits._
import docspell.store.records.RCollective.{Columns => CC}
import docspell.store.records.RUser.{Columns => UC}
import docspell.store.records.{RCollective, RUser}
import doobie._
import doobie.implicits._
import org.log4s._

object QLogin {
  private[this] val logger = getLogger

  case class Data( account: AccountId
                 , password: Password
                 , collectiveState: CollectiveState
                 , userState: UserState)

  def findUser(acc: AccountId): ConnectionIO[Option[Data]] = {
    val ucid = UC.cid.prefix("u")
    val login = UC.login.prefix("u")
    val pass = UC.password.prefix("u")
    val ustate = UC.state.prefix("u")
    val cstate = CC.state.prefix("c")
    val ccid = CC.id.prefix("c")

    val sql = selectSimple(
      List(ucid, login, pass, cstate, ustate),
      RUser.table ++ fr"u, " ++ RCollective.table ++ fr"c",
      and(ucid is ccid, login is acc.user, ucid is acc.collective))

    logger.trace(s"SQL : $sql")
    sql.query[Data].option
  }
}
@ -0,0 +1,86 @@
package docspell.store.queries

import fs2._
import cats.implicits._
import doobie._
import docspell.common._
import docspell.store.{AddResult, Store}
import docspell.store.impl.Column
import docspell.store.records.ROrganization.{Columns => OC}
import docspell.store.records.RPerson.{Columns => PC}
import docspell.store.records._

object QOrganization {

  def findOrgAndContact(coll: Ident, order: OC.type => Column): Stream[ConnectionIO, (ROrganization, Vector[RContact])] = {
    ROrganization.findAll(coll, order).
      evalMap(ro => RContact.findAllOrg(ro.oid).map(cs => (ro, cs)))
  }

  def findPersonAndContact(coll: Ident, order: PC.type => Column): Stream[ConnectionIO, (RPerson, Vector[RContact])] = {
    RPerson.findAll(coll, order).
      evalMap(ro => RContact.findAllPerson(ro.pid).map(cs => (ro, cs)))
  }

  def addOrg[F[_]](org: ROrganization, contacts: Seq[RContact], cid: Ident): Store[F] => F[AddResult] = {
    val insert = for {
      n  <- ROrganization.insert(org)
      cs <- contacts.toList.traverse(RContact.insert)
    } yield n + cs.sum

    val exists = ROrganization.existsByName(cid, org.name)

    store => store.add(insert, exists)
  }

  def addPerson[F[_]](person: RPerson, contacts: Seq[RContact], cid: Ident): Store[F] => F[AddResult] = {
    val insert = for {
      n  <- RPerson.insert(person)
      cs <- contacts.toList.traverse(RContact.insert)
    } yield n + cs.sum

    val exists = RPerson.existsByName(cid, person.name)

    store => store.add(insert, exists)
  }

  def updateOrg[F[_]](org: ROrganization, contacts: Seq[RContact], cid: Ident): Store[F] => F[AddResult] = {
    val insert = for {
      n  <- ROrganization.update(org)
      d  <- RContact.deleteOrg(org.oid)
      cs <- contacts.toList.traverse(RContact.insert)
    } yield n + cs.sum + d

    val exists = ROrganization.existsByName(cid, org.name)

    store => store.add(insert, exists)
  }

  def updatePerson[F[_]](person: RPerson, contacts: Seq[RContact], cid: Ident): Store[F] => F[AddResult] = {
    val insert = for {
      n  <- RPerson.update(person)
      d  <- RContact.deletePerson(person.pid)
      cs <- contacts.toList.traverse(RContact.insert)
    } yield n + cs.sum + d

    val exists = RPerson.existsByName(cid, person.name)

    store => store.add(insert, exists)
  }

  def deleteOrg(orgId: Ident, collective: Ident): ConnectionIO[Int] = {
    for {
      n0 <- RItem.removeCorrOrg(collective, orgId)
      n1 <- RContact.deleteOrg(orgId)
      n2 <- ROrganization.delete(orgId, collective)
    } yield n0 + n1 + n2
  }

  def deletePerson(personId: Ident, collective: Ident): ConnectionIO[Int] = {
    for {
      n0 <- RItem.removeCorrPerson(collective, personId)
      n1 <- RItem.removeConcPerson(collective, personId)
      n2 <- RContact.deletePerson(personId)
      n3 <- RPerson.delete(personId, collective)
    } yield n0 + n1 + n2 + n3
  }
}
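Not part of the commit: the helpers above return a function that still needs a Store. A hypothetical call site, assuming a configured Store[IO] and pre-built ROrganization and RContact records supplied by the caller:

import cats.effect.IO
import docspell.common.Ident
import docspell.store.{AddResult, Store}
import docspell.store.records.{RContact, ROrganization}

// Hypothetical call: insert an organization with its contacts unless the name already exists.
def addOrgSketch(store: Store[IO], org: ROrganization, contacts: Seq[RContact], cid: Ident): IO[AddResult] =
  QOrganization.addOrg[IO](org, contacts, cid)(store)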
@ -0,0 +1,46 @@
package docspell.store.queue

import cats.implicits._
import cats.effect.{Effect, Resource}
import docspell.common._
import docspell.common.syntax.all._
import docspell.store.Store
import docspell.store.queries.QJob
import docspell.store.records.RJob
import org.log4s._

trait JobQueue[F[_]] {

  def insert(job: RJob): F[Unit]

  def insertAll(jobs: Seq[RJob]): F[Unit]

  def nextJob(prio: Ident => F[Priority], worker: Ident, retryPause: Duration): F[Option[RJob]]
}

object JobQueue {
  private[this] val logger = getLogger

  def apply[F[_]: Effect](store: Store[F]): Resource[F, JobQueue[F]] =
    Resource.pure(new JobQueue[F] {

      def nextJob(prio: Ident => F[Priority], worker: Ident, retryPause: Duration): F[Option[RJob]] =
        logger.fdebug("Select next job") *> QJob.takeNextJob(store)(prio, worker, retryPause)

      def insert(job: RJob): F[Unit] =
        store.transact(RJob.insert(job)).
          flatMap({ n =>
            if (n != 1) Effect[F].raiseError(new Exception(s"Inserting job failed. Update count: $n"))
            else ().pure[F]
          })

      def insertAll(jobs: Seq[RJob]): F[Unit] =
        jobs.toList.traverse(j => insert(j).attempt).
          map(_.foreach {
            case Right(()) =>
            case Left(ex) =>
              logger.error(ex)("Could not insert job. Skipping it.")
          })
    })
}
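Not part of the commit: a small sketch of how the queue might be driven from an effectful program. It assumes a configured Store[IO], an already-constructed RJob and a caller-supplied priority lookup; JobQueue(store) needs an Effect instance, which IO provides.

import cats.effect.IO
import cats.implicits._
import docspell.common.{Duration, Ident, Priority}
import docspell.store.Store
import docspell.store.records.RJob

// Hypothetical wiring: insert one job, then ask for the next runnable job for a worker.
def submitAndTake(store: Store[IO], job: RJob, worker: Ident,
                  prio: Ident => IO[Priority], retryPause: Duration): IO[Option[RJob]] =
  JobQueue(store).use { queue =>
    queue.insert(job) *> queue.nextJob(prio, worker, retryPause)
  }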
@ -0,0 +1,71 @@
|
||||
package docspell.store.records
|
||||
|
||||
import bitpeace.FileMeta
|
||||
import doobie._
|
||||
import doobie.implicits._
|
||||
import docspell.common._
|
||||
import docspell.store.impl._
|
||||
import docspell.store.impl.Implicits._
|
||||
|
||||
case class RAttachment( id: Ident
|
||||
, itemId: Ident
|
||||
, fileId: Ident
|
||||
, position: Int
|
||||
, created: Timestamp
|
||||
, name: Option[String]) {
|
||||
|
||||
}
|
||||
|
||||
object RAttachment {
|
||||
|
||||
val table = fr"attachment"
|
||||
|
||||
object Columns {
|
||||
val id = Column("attachid")
|
||||
val itemId = Column("itemid")
|
||||
val fileId = Column("filemetaid")
|
||||
val position = Column("position")
|
||||
val created = Column("created")
|
||||
val name = Column("name")
|
||||
val all = List(id, itemId, fileId, position, created, name)
|
||||
}
|
||||
import Columns._
|
||||
|
||||
def insert(v: RAttachment): ConnectionIO[Int] =
|
||||
insertRow(table, all, fr"${v.id},${v.itemId},${v.fileId.id},${v.position},${v.created},${v.name}").update.run
|
||||
|
||||
def findById(attachId: Ident): ConnectionIO[Option[RAttachment]] =
|
||||
selectSimple(all, table, id is attachId).query[RAttachment].option
|
||||
|
||||
def findByIdAndCollective(attachId: Ident, collective: Ident): ConnectionIO[Option[RAttachment]] = {
|
||||
selectSimple(all.map(_.prefix("a")), table ++ fr"a," ++ RItem.table ++ fr"i", and(
|
||||
fr"a.itemid = i.itemid",
|
||||
id.prefix("a") is attachId,
|
||||
RItem.Columns.cid.prefix("i") is collective
|
||||
)).query[RAttachment].option
|
||||
}
|
||||
|
||||
def findByItem(id: Ident): ConnectionIO[Vector[RAttachment]] =
|
||||
selectSimple(all, table, itemId is id).query[RAttachment].to[Vector]
|
||||
|
||||
def findByItemAndCollective(id: Ident, coll: Ident): ConnectionIO[Vector[RAttachment]] = {
|
||||
val q = selectSimple(all.map(_.prefix("a")), table ++ fr"a", Fragment.empty) ++
|
||||
fr"INNER JOIN" ++ RItem.table ++ fr"i ON" ++ RItem.Columns.id.prefix("i").is(itemId.prefix("a")) ++
|
||||
fr"WHERE" ++ and(itemId.prefix("a").is(id), RItem.Columns.cid.prefix("i").is(coll))
|
||||
q.query[RAttachment].to[Vector]
|
||||
}
|
||||
|
||||
def findByItemWithMeta(id: Ident): ConnectionIO[Vector[(RAttachment, FileMeta)]] = {
|
||||
import bitpeace.sql._
|
||||
|
||||
val q = fr"SELECT a.*,m.* FROM" ++ table ++ fr"a, filemeta m WHERE a.filemetaid = m.id AND a.itemid = $id ORDER BY a.position ASC"
|
||||
q.query[(RAttachment, FileMeta)].to[Vector]
|
||||
}
|
||||
|
||||
def delete(attachId: Ident): ConnectionIO[Int] =
|
||||
for {
|
||||
n0 <- RAttachmentMeta.delete(attachId)
|
||||
n1 <- deleteFrom(table, id is attachId).update.run
|
||||
} yield n0 + n1
|
||||
|
||||
}
|
@ -0,0 +1,62 @@
package docspell.store.records

import cats.implicits._
import doobie._
import doobie.implicits._
import docspell.common._
import docspell.store.impl._
import docspell.store.impl.Implicits._

case class RAttachmentMeta(id: Ident
                          , content: Option[String]
                          , nerlabels: List[NerLabel]
                          , proposals: MetaProposalList)

object RAttachmentMeta {
  def empty(attachId: Ident) = RAttachmentMeta(attachId, None, Nil, MetaProposalList.empty)

  val table = fr"attachmentmeta"

  object Columns {
    val id = Column("attachid")
    val content = Column("content")
    val nerlabels = Column("nerlabels")
    val proposals = Column("itemproposals")
    val all = List(id, content, nerlabels, proposals)
  }
  import Columns._

  def insert(v: RAttachmentMeta): ConnectionIO[Int] =
    insertRow(table, all, fr"${v.id},${v.content},${v.nerlabels},${v.proposals}").update.run

  def exists(attachId: Ident): ConnectionIO[Boolean] =
    selectCount(id, table, id is attachId).query[Int].unique.map(_ > 0)

  def upsert(v: RAttachmentMeta): ConnectionIO[Int] =
    for {
      n0 <- update(v)
      n1 <- if (n0 == 0) insert(v) else n0.pure[ConnectionIO]
    } yield n1

  def update(v: RAttachmentMeta): ConnectionIO[Int] =
    updateRow(table, id is v.id, commas(
      content setTo v.content,
      nerlabels setTo v.nerlabels,
      proposals setTo v.proposals
    )).update.run

  def updateLabels(mid: Ident, labels: List[NerLabel]): ConnectionIO[Int] =
    updateRow(table, id is mid, commas(
      nerlabels setTo labels
    )).update.run

  def updateProposals(mid: Ident, plist: MetaProposalList): ConnectionIO[Int] =
    updateRow(table, id is mid, commas(
      proposals setTo plist
    )).update.run

  def delete(attachId: Ident): ConnectionIO[Int] =
    deleteFrom(table, id is attachId).update.run
}
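Not part of the commit: a sketch of how text-analysis results might be stored for an attachment using the upsert above, which updates first and inserts when no row exists yet. The helper name and parameters are illustrative only.

import doobie.ConnectionIO
import docspell.common.{Ident, MetaProposalList, NerLabel}

// Hypothetical helper: write extracted text and NER labels, creating the row on first use.
def storeExtraction(attachId: Ident, text: String, labels: List[NerLabel]): ConnectionIO[Int] =
  RAttachmentMeta.upsert(RAttachmentMeta(attachId, Some(text), labels, MetaProposalList.empty))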
@ -0,0 +1,68 @@
package docspell.store.records

import docspell.common._
import docspell.store.impl.Column
import docspell.store.impl.Implicits._
import doobie._
import doobie.implicits._
import fs2.Stream

case class RCollective( id: Ident
                      , state: CollectiveState
                      , language: Language
                      , created: Timestamp)

object RCollective {

  val table = fr"collective"

  object Columns {

    val id = Column("cid")
    val state = Column("state")
    val language = Column("doclang")
    val created = Column("created")

    val all = List(id, state, language, created)
  }

  import Columns._

  def insert(value: RCollective): ConnectionIO[Int] = {
    val sql = insertRow(table, Columns.all, fr"${value.id},${value.state},${value.language},${value.created}")
    sql.update.run
  }

  def update(value: RCollective): ConnectionIO[Int] = {
    val sql = updateRow(table, id is value.id, commas(
      state setTo value.state
    ))
    sql.update.run
  }

  def findLanguage(cid: Ident): ConnectionIO[Option[Language]] =
    selectSimple(List(language), table, id is cid).query[Option[Language]].unique

  def updateLanguage(cid: Ident, lang: Language): ConnectionIO[Int] =
    updateRow(table, id is cid, language setTo lang).update.run

  def findById(cid: Ident): ConnectionIO[Option[RCollective]] = {
    val sql = selectSimple(all, table, id is cid)
    sql.query[RCollective].option
  }

  def existsById(cid: Ident): ConnectionIO[Boolean] = {
    val sql = selectCount(id, table, id is cid)
    sql.query[Int].unique.map(_ > 0)
  }

  def findAll(order: Columns.type => Column): ConnectionIO[Vector[RCollective]] = {
    val sql = selectSimple(all, table, Fragment.empty) ++ orderBy(order(Columns).f)
    sql.query[RCollective].to[Vector]
  }

  def streamAll(order: Columns.type => Column): Stream[ConnectionIO, RCollective] = {
    val sql = selectSimple(all, table, Fragment.empty) ++ orderBy(order(Columns).f)
    sql.query[RCollective].stream
  }
}
@ -0,0 +1,73 @@
package docspell.store.records

import doobie._, doobie.implicits._
import docspell.common._
import docspell.store.impl._
import docspell.store.impl.Implicits._

case class RContact(
    contactId: Ident
  , value: String
  , kind: ContactKind
  , personId: Option[Ident]
  , orgId: Option[Ident]
  , created: Timestamp)

object RContact {

  val table = fr"contact"

  object Columns {
    val contactId = Column("contactid")
    val value = Column("value")
    val kind = Column("kind")
    val personId = Column("pid")
    val orgId = Column("oid")
    val created = Column("created")
    val all = List(contactId, value, kind, personId, orgId, created)
  }

  import Columns._

  def insert(v: RContact): ConnectionIO[Int] = {
    val sql = insertRow(table, all,
      fr"${v.contactId},${v.value},${v.kind},${v.personId},${v.orgId},${v.created}")
    sql.update.run
  }

  def update(v: RContact): ConnectionIO[Int] = {
    val sql = updateRow(table, contactId is v.contactId, commas(
      value setTo v.value,
      kind setTo v.kind,
      personId setTo v.personId,
      orgId setTo v.orgId
    ))
    sql.update.run
  }

  def delete(v: RContact): ConnectionIO[Int] =
    deleteFrom(table, contactId is v.contactId).update.run

  def deleteOrg(oid: Ident): ConnectionIO[Int] =
    deleteFrom(table, orgId is oid).update.run

  def deletePerson(pid: Ident): ConnectionIO[Int] =
    deleteFrom(table, personId is pid).update.run

  def findById(id: Ident): ConnectionIO[Option[RContact]] = {
    val sql = selectSimple(all, table, contactId is id)
    sql.query[RContact].option
  }

  def findAllPerson(pid: Ident): ConnectionIO[Vector[RContact]] = {
    val sql = selectSimple(all, table, personId is pid)
    sql.query[RContact].to[Vector]
  }

  def findAllOrg(oid: Ident): ConnectionIO[Vector[RContact]] = {
    val sql = selectSimple(all, table, orgId is oid)
    sql.query[RContact].to[Vector]
  }
}
@ -0,0 +1,65 @@
|
||||
package docspell.store.records
|
||||
|
||||
import doobie._, doobie.implicits._
|
||||
import docspell.common._
|
||||
import docspell.store.impl._
|
||||
import docspell.store.impl.Implicits._
|
||||
|
||||
case class REquipment(
|
||||
eid: Ident
|
||||
, cid: Ident
|
||||
, name: String
|
||||
, created: Timestamp) {
|
||||
|
||||
}
|
||||
|
||||
object REquipment {
|
||||
|
||||
val table = fr"equipment"
|
||||
|
||||
object Columns {
|
||||
val eid = Column("eid")
|
||||
val cid = Column("cid")
|
||||
val name = Column("name")
|
||||
val created = Column("created")
|
||||
val all = List(eid,cid,name,created)
|
||||
}
|
||||
import Columns._
|
||||
|
||||
def insert(v: REquipment): ConnectionIO[Int] = {
|
||||
val sql = insertRow(table, all,
|
||||
fr"${v.eid},${v.cid},${v.name},${v.created}")
|
||||
sql.update.run
|
||||
}
|
||||
|
||||
def update(v: REquipment): ConnectionIO[Int] = {
|
||||
val sql = updateRow(table, and(eid is v.eid, cid is v.cid), commas(
|
||||
cid setTo v.cid,
|
||||
name setTo v.name
|
||||
))
|
||||
sql.update.run
|
||||
}
|
||||
|
||||
def existsByName(coll: Ident, ename: String): ConnectionIO[Boolean] = {
|
||||
val sql = selectCount(eid, table, and(cid is coll, name is ename))
|
||||
sql.query[Int].unique.map(_ > 0)
|
||||
}
|
||||
|
||||
def findById(id: Ident): ConnectionIO[Option[REquipment]] = {
|
||||
val sql = selectSimple(all, table, eid is id)
|
||||
sql.query[REquipment].option
|
||||
}
|
||||
|
||||
def findAll(coll: Ident, order: Columns.type => Column): ConnectionIO[Vector[REquipment]] = {
|
||||
val sql = selectSimple(all, table, cid is coll) ++ orderBy(order(Columns).f)
|
||||
sql.query[REquipment].to[Vector]
|
||||
}
|
||||
|
||||
def findLike(coll: Ident, equipName: String): ConnectionIO[Vector[IdRef]] =
|
||||
selectSimple(List(eid, name), table, and(cid is coll,
|
||||
name.lowerLike(equipName))).
|
||||
query[IdRef].to[Vector]
|
||||
|
||||
def delete(id: Ident, coll: Ident): ConnectionIO[Int] =
|
||||
deleteFrom(table, and(eid is id, cid is coll)).update.run
|
||||
}
|
@ -0,0 +1,53 @@
package docspell.store.records

import cats.implicits._
import cats.effect.Sync
import doobie._
import doobie.implicits._
import docspell.common._
import docspell.store.impl._
import docspell.store.impl.Implicits._

case class RInvitation(id: Ident, created: Timestamp)

object RInvitation {

  val table = fr"invitation"

  object Columns {
    val id = Column("id")
    val created = Column("created")
    val all = List(id, created)
  }
  import Columns._

  def generate[F[_]: Sync]: F[RInvitation] =
    for {
      c <- Timestamp.current[F]
      i <- Ident.randomId[F]
    } yield RInvitation(i, c)

  def insert(v: RInvitation): ConnectionIO[Int] =
    insertRow(table, all, fr"${v.id},${v.created}").update.run

  def insertNew: ConnectionIO[RInvitation] =
    generate[ConnectionIO].
      flatMap(v => insert(v).map(_ => v))

  def findById(invite: Ident): ConnectionIO[Option[RInvitation]] =
    selectSimple(all, table, id is invite).query[RInvitation].option

  def delete(invite: Ident): ConnectionIO[Int] =
    deleteFrom(table, id is invite).update.run

  def useInvite(invite: Ident, minCreated: Timestamp): ConnectionIO[Boolean] = {
    val get = selectCount(id, table, and(id is invite, created isGt minCreated)).
      query[Int].unique
    for {
      inv <- get
      _ <- delete(invite)
    } yield inv > 0
  }
}
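Not part of the commit: a brief sketch of the intended single-use lifecycle. A key is created with insertNew and later redeemed with useInvite, which deletes the row whether or not it was still recent enough; minCreated is assumed to be derived by the caller from the configured invitation lifetime.

import doobie.ConnectionIO
import docspell.common.Timestamp

// Hypothetical round trip: create a key, then redeem it against a minimum creation time.
def inviteRoundtrip(minCreated: Timestamp): ConnectionIO[Boolean] =
  for {
    inv <- RInvitation.insertNew
    ok  <- RInvitation.useInvite(inv.id, minCreated)
  } yield ok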
modules/store/src/main/scala/docspell/store/records/RItem.scala (new file, 163 lines)
@ -0,0 +1,163 @@
|
||||
package docspell.store.records
|
||||
|
||||
import cats.implicits._
|
||||
import cats.effect.Sync
|
||||
import doobie._
|
||||
import doobie.implicits._
|
||||
import docspell.common._
|
||||
import docspell.store.impl._
|
||||
import docspell.store.impl.Implicits._
|
||||
|
||||
case class RItem( id: Ident
|
||||
, cid: Ident
|
||||
, name: String
|
||||
, itemDate: Option[Timestamp]
|
||||
, source: String
|
||||
, direction: Direction
|
||||
, state: ItemState
|
||||
, corrOrg: Option[Ident]
|
||||
, corrPerson: Option[Ident]
|
||||
, concPerson: Option[Ident]
|
||||
, concEquipment: Option[Ident]
|
||||
, inReplyTo: Option[Ident]
|
||||
, dueDate: Option[Timestamp]
|
||||
, created: Timestamp
|
||||
, updated: Timestamp
|
||||
, notes: Option[String]) {
|
||||
|
||||
}
|
||||
|
||||
object RItem {
|
||||
|
||||
def newItem[F[_]: Sync](cid: Ident, name: String, source: String, direction: Direction, state: ItemState): F[RItem] =
|
||||
for {
|
||||
now <- Timestamp.current[F]
|
||||
id <- Ident.randomId[F]
|
||||
} yield RItem(id, cid, name, None, source, direction, state, None, None, None, None, None, None, now, now, None)
|
||||
|
||||
val table = fr"item"
|
||||
|
||||
object Columns {
|
||||
val id = Column("itemid")
|
||||
val cid = Column("cid")
|
||||
val name = Column("name")
|
||||
val itemDate = Column("itemdate")
|
||||
val source = Column("source")
|
||||
val incoming = Column("incoming")
|
||||
val state = Column("state")
|
||||
val corrOrg = Column("corrorg")
|
||||
val corrPerson = Column("corrperson")
|
||||
val concPerson = Column("concperson")
|
||||
val concEquipment = Column("concequipment")
|
||||
val inReplyTo = Column("inreplyto")
|
||||
val dueDate = Column("duedate")
|
||||
val created = Column("created")
|
||||
val updated = Column("updated")
|
||||
val notes = Column("notes")
|
||||
val all = List(id, cid, name, itemDate, source, incoming, state, corrOrg,
|
||||
corrPerson, concPerson, concEquipment, inReplyTo, dueDate, created, updated, notes)
|
||||
}
|
||||
import Columns._
|
||||
|
||||
def insert(v: RItem): ConnectionIO[Int] =
|
||||
insertRow(table, all, fr"${v.id},${v.cid},${v.name},${v.itemDate},${v.source},${v.direction},${v.state}," ++
|
||||
fr"${v.corrOrg},${v.corrPerson},${v.concPerson},${v.concEquipment},${v.inReplyTo},${v.dueDate}," ++
|
||||
fr"${v.created},${v.updated},${v.notes}").update.run
|
||||
|
||||
def getCollective(itemId: Ident): ConnectionIO[Option[Ident]] =
|
||||
selectSimple(List(cid), table, id is itemId).query[Ident].option
|
||||
|
||||
def updateState(itemId: Ident, itemState: ItemState): ConnectionIO[Int] =
|
||||
for {
|
||||
t <- currentTime
|
||||
n <- updateRow(table, id is itemId, commas(state setTo itemState, updated setTo t)).update.run
|
||||
} yield n
|
||||
|
||||
def updateStateForCollective(itemId: Ident, itemState: ItemState, coll: Ident): ConnectionIO[Int] =
|
||||
for {
|
||||
t <- currentTime
|
||||
n <- updateRow(table, and(id is itemId, cid is coll), commas(state setTo itemState, updated setTo t)).update.run
|
||||
} yield n
|
||||
|
||||
def updateDirection(itemId: Ident, coll: Ident, dir: Direction): ConnectionIO[Int] =
|
||||
for {
|
||||
t <- currentTime
|
||||
n <- updateRow(table, and(id is itemId, cid is coll), commas(incoming setTo dir, updated setTo t)).update.run
|
||||
} yield n
|
||||
|
||||
|
||||
def updateCorrOrg(itemId: Ident, coll: Ident, org: Option[Ident]): ConnectionIO[Int] =
|
||||
for {
|
||||
t <- currentTime
|
||||
n <- updateRow(table, and(id is itemId, cid is coll), commas(corrOrg setTo org, updated setTo t)).update.run
|
||||
} yield n
|
||||
|
||||
def removeCorrOrg(coll: Ident, currentOrg: Ident): ConnectionIO[Int] =
|
||||
for {
|
||||
t <- currentTime
|
||||
n <- updateRow(table, and(cid is coll, corrOrg is Some(currentOrg)), commas(corrOrg setTo(None: Option[Ident]), updated setTo t)).update.run
|
||||
} yield n
|
||||
|
||||
def updateCorrPerson(itemId: Ident, coll: Ident, person: Option[Ident]): ConnectionIO[Int] =
|
||||
for {
|
||||
t <- currentTime
|
||||
n <- updateRow(table, and(id is itemId, cid is coll), commas(corrPerson setTo person, updated setTo t)).update.run
|
||||
} yield n
|
||||
|
||||
def removeCorrPerson(coll: Ident, currentPerson: Ident): ConnectionIO[Int] =
|
||||
for {
|
||||
t <- currentTime
|
||||
n <- updateRow(table, and(cid is coll, corrPerson is Some(currentPerson)), commas(corrPerson setTo(None: Option[Ident]), updated setTo t)).update.run
|
||||
} yield n
|
||||
|
||||
def updateConcPerson(itemId: Ident, coll: Ident, person: Option[Ident]): ConnectionIO[Int] =
|
||||
for {
|
||||
t <- currentTime
|
||||
n <- updateRow(table, and(id is itemId, cid is coll), commas(concPerson setTo person, updated setTo t)).update.run
|
||||
} yield n
|
||||
|
||||
def removeConcPerson(coll: Ident, currentPerson: Ident): ConnectionIO[Int] =
|
||||
for {
|
||||
t <- currentTime
|
||||
n <- updateRow(table, and(cid is coll, concPerson is Some(currentPerson)), commas(concPerson setTo(None: Option[Ident]), updated setTo t)).update.run
|
||||
} yield n
|
||||
|
||||
def updateConcEquip(itemId: Ident, coll: Ident, equip: Option[Ident]): ConnectionIO[Int] =
|
||||
for {
|
||||
t <- currentTime
|
||||
n <- updateRow(table, and(id is itemId, cid is coll), commas(concEquipment setTo equip, updated setTo t)).update.run
|
||||
} yield n
|
||||
|
||||
  def removeConcEquip(coll: Ident, currentEquip: Ident): ConnectionIO[Int] =
    for {
      t <- currentTime
      // clear the equipment reference, not the person, when unlinking equipment
      n <- updateRow(table, and(cid is coll, concEquipment is Some(currentEquip)), commas(concEquipment setTo(None: Option[Ident]), updated setTo t)).update.run
    } yield n
|
||||
|
||||
def updateNotes(itemId: Ident, coll: Ident, text: Option[String]): ConnectionIO[Int] =
|
||||
for {
|
||||
t <- currentTime
|
||||
n <- updateRow(table, and(id is itemId, cid is coll), commas(notes setTo text, updated setTo t)).update.run
|
||||
} yield n
|
||||
|
||||
def updateName(itemId: Ident, coll: Ident, itemName: String): ConnectionIO[Int] =
|
||||
for {
|
||||
t <- currentTime
|
||||
n <- updateRow(table, and(id is itemId, cid is coll), commas(name setTo itemName, updated setTo t)).update.run
|
||||
} yield n
|
||||
|
||||
def updateDate(itemId: Ident, coll: Ident, date: Option[Timestamp]): ConnectionIO[Int] =
|
||||
for {
|
||||
t <- currentTime
|
||||
n <- updateRow(table, and(id is itemId, cid is coll), commas(itemDate setTo date, updated setTo t)).update.run
|
||||
} yield n
|
||||
|
||||
def updateDueDate(itemId: Ident, coll: Ident, date: Option[Timestamp]): ConnectionIO[Int] =
|
||||
for {
|
||||
t <- currentTime
|
||||
n <- updateRow(table, and(id is itemId, cid is coll), commas(dueDate setTo date, updated setTo t)).update.run
|
||||
} yield n
|
||||
|
||||
def deleteByIdAndCollective(itemId: Ident, coll: Ident): ConnectionIO[Int] =
|
||||
deleteFrom(table, and(id is itemId, cid is coll)).update.run
|
||||
}
|
modules/store/src/main/scala/docspell/store/records/RJob.scala (new file, 165 lines)
@ -0,0 +1,165 @@
|
||||
package docspell.store.records
|
||||
|
||||
import cats.effect.Sync
|
||||
import doobie._
|
||||
import doobie.implicits._
|
||||
import docspell.common._
|
||||
import docspell.store.impl.Column
|
||||
import docspell.store.impl.Implicits._
|
||||
import io.circe.Encoder
|
||||
|
||||
case class RJob(id: Ident
|
||||
, task: Ident
|
||||
, group: Ident
|
||||
, args: String
|
||||
, subject: String
|
||||
, submitted: Timestamp
|
||||
, submitter: Ident
|
||||
, priority: Priority
|
||||
, state: JobState
|
||||
, retries: Int
|
||||
, progress: Int
|
||||
, tracker: Option[Ident]
|
||||
, worker: Option[Ident]
|
||||
, started: Option[Timestamp]
|
||||
, finished: Option[Timestamp]) {
|
||||
|
||||
def info: String =
|
||||
s"${id.id.substring(0, 9)}.../${group.id}/${task.id}/$priority"
|
||||
}
|
||||
|
||||
object RJob {
|
||||
|
||||
def newJob[A](id: Ident
|
||||
, task: Ident
|
||||
, group: Ident
|
||||
, args: A
|
||||
, subject: String
|
||||
, submitted: Timestamp
|
||||
, submitter: Ident
|
||||
, priority: Priority
|
||||
, tracker: Option[Ident])(implicit E: Encoder[A]): RJob =
|
||||
RJob(id, task, group, E(args).noSpaces, subject, submitted, submitter, priority, JobState.Waiting, 0, 0, tracker, None, None, None)
|
||||
|
||||
val table = fr"job"
|
||||
|
||||
object Columns {
|
||||
val id = Column("jid")
|
||||
val task = Column("task")
|
||||
val group = Column("group_")
|
||||
val args = Column("args")
|
||||
val subject = Column("subject")
|
||||
val submitted = Column("submitted")
|
||||
val submitter = Column("submitter")
|
||||
val priority = Column("priority")
|
||||
val state = Column("state")
|
||||
val retries = Column("retries")
|
||||
val progress = Column("progress")
|
||||
val tracker = Column("tracker")
|
||||
val worker = Column("worker")
|
||||
val started = Column("started")
|
||||
val startedmillis = Column("startedmillis")
|
||||
val finished = Column("finished")
|
||||
val all = List(id,task,group,args,subject,submitted,submitter,priority,state,retries,progress,tracker,worker,started,finished)
|
||||
}
|
||||
|
||||
import Columns._
|
||||
|
||||
def insert(v: RJob): ConnectionIO[Int] = {
|
||||
val smillis = v.started.map(_.toMillis)
|
||||
val sql = insertRow(table, all ++ List(startedmillis),
|
||||
fr"${v.id},${v.task},${v.group},${v.args},${v.subject},${v.submitted},${v.submitter},${v.priority},${v.state},${v.retries},${v.progress},${v.tracker},${v.worker},${v.started},${v.finished},$smillis")
|
||||
sql.update.run
|
||||
}
|
||||
|
||||
def findFromIds(ids: Seq[Ident]): ConnectionIO[Vector[RJob]] = {
|
||||
if (ids.isEmpty) Sync[ConnectionIO].pure(Vector.empty[RJob])
|
||||
else selectSimple(all, table, id isOneOf ids).query[RJob].to[Vector]
|
||||
}
|
||||
|
||||
def findByIdAndGroup(jobId: Ident, jobGroup: Ident): ConnectionIO[Option[RJob]] =
|
||||
selectSimple(all, table, and(id is jobId, group is jobGroup)).query[RJob].option
|
||||
|
||||
def setRunningToWaiting(workerId: Ident): ConnectionIO[Int] = {
|
||||
val states: Seq[JobState] = List(JobState.Running, JobState.Scheduled)
|
||||
updateRow(table, and(worker is workerId, state isOneOf states),
|
||||
state setTo (JobState.Waiting: JobState)).update.run
|
||||
}
|
||||
|
||||
def incrementRetries(jobid: Ident): ConnectionIO[Int] =
|
||||
updateRow(table, and(id is jobid, state is (JobState.Stuck: JobState)),
|
||||
retries.f ++ fr"=" ++ retries.f ++ fr"+ 1").update.run
|
||||
|
||||
def setRunning(jobId: Ident, workerId: Ident, now: Timestamp): ConnectionIO[Int] =
|
||||
updateRow(table, id is jobId, commas(
|
||||
state setTo (JobState.Running: JobState),
|
||||
started setTo now,
|
||||
startedmillis setTo now.toMillis,
|
||||
worker setTo workerId
|
||||
)).update.run
|
||||
|
||||
def setWaiting(jobId: Ident): ConnectionIO[Int] =
|
||||
updateRow(table, id is jobId, commas(
|
||||
state setTo (JobState.Waiting: JobState),
|
||||
started setTo (None: Option[Timestamp]),
|
||||
startedmillis setTo (None: Option[Long]),
|
||||
finished setTo (None: Option[Timestamp])
|
||||
)).update.run
|
||||
|
||||
def setScheduled(jobId: Ident, workerId: Ident): ConnectionIO[Int] = {
|
||||
for {
|
||||
_ <- incrementRetries(jobId)
|
||||
n <- updateRow(table, and(id is jobId, or(worker isNull, worker is workerId), state isOneOf Seq[JobState](JobState.Waiting, JobState.Stuck)), commas(
|
||||
state setTo (JobState.Scheduled: JobState),
|
||||
worker setTo workerId
|
||||
)).update.run
|
||||
} yield n
|
||||
}
|
||||
|
||||
def setSuccess(jobId: Ident, now: Timestamp): ConnectionIO[Int] =
|
||||
updateRow(table, id is jobId, commas(
|
||||
state setTo (JobState.Success: JobState),
|
||||
finished setTo now
|
||||
)).update.run
|
||||
|
||||
def setStuck(jobId: Ident, now: Timestamp): ConnectionIO[Int] =
|
||||
updateRow(table, id is jobId, commas(
|
||||
state setTo (JobState.Stuck: JobState),
|
||||
finished setTo now
|
||||
)).update.run
|
||||
|
||||
def setFailed(jobId: Ident, now: Timestamp): ConnectionIO[Int] =
|
||||
updateRow(table, id is jobId, commas(
|
||||
state setTo (JobState.Failed: JobState),
|
||||
finished setTo now
|
||||
)).update.run
|
||||
|
||||
def setCancelled(jobId: Ident, now: Timestamp): ConnectionIO[Int] =
|
||||
updateRow(table, id is jobId, commas(
|
||||
state setTo (JobState.Cancelled: JobState),
|
||||
finished setTo now
|
||||
)).update.run
|
||||
|
||||
def getRetries(jobId: Ident): ConnectionIO[Option[Int]] =
|
||||
selectSimple(List(retries), table, id is jobId).query[Int].option
|
||||
|
||||
def setProgress(jobId: Ident, perc: Int): ConnectionIO[Int] =
|
||||
updateRow(table, id is jobId, progress setTo perc).update.run
|
||||
|
||||
def selectWaiting: ConnectionIO[Option[RJob]] = {
|
||||
val sql = selectSimple(all, table, state is (JobState.Waiting: JobState))
|
||||
sql.query[RJob].to[Vector].map(_.headOption)
|
||||
}
|
||||
|
||||
def selectGroupInState(states: Seq[JobState]): ConnectionIO[Vector[Ident]] = {
|
||||
val sql = selectDistinct(List(group), table, state isOneOf states) ++ orderBy(group.f)
|
||||
sql.query[Ident].to[Vector]
|
||||
}
|
||||
|
||||
def delete(jobId: Ident): ConnectionIO[Int] = {
|
||||
for {
|
||||
n0 <- RJobLog.deleteAll(jobId)
|
||||
n1 <- deleteFrom(table, id is jobId).update.run
|
||||
} yield n0 + n1
|
||||
}
|
||||
}
|
@ -0,0 +1,37 @@
package docspell.store.records

import cats.implicits._
import doobie._
import doobie.implicits._
import docspell.common._
import docspell.store.impl.Column
import docspell.store.impl.Implicits._

case class RJobGroupUse(groupId: Ident, workerId: Ident)

object RJobGroupUse {

  val table = fr"jobgroupuse"

  object Columns {
    val group = Column("groupid")
    val worker = Column("workerid")
    val all = List(group, worker)
  }
  import Columns._

  def insert(v: RJobGroupUse): ConnectionIO[Int] =
    insertRow(table, all, fr"${v.groupId},${v.workerId}").update.run

  def updateGroup(v: RJobGroupUse): ConnectionIO[Int] =
    updateRow(table, worker is v.workerId, group setTo v.groupId).update.run

  def setGroup(v: RJobGroupUse): ConnectionIO[Int] =
    updateGroup(v).flatMap(n => if (n > 0) n.pure[ConnectionIO] else insert(v))

  def findGroup(workerId: Ident): ConnectionIO[Option[Ident]] =
    selectSimple(List(group), table, worker is workerId).query[Ident].option
}
@ -0,0 +1,39 @@
package docspell.store.records

import doobie._
import doobie.implicits._
import docspell.common._
import docspell.store.impl.Column
import docspell.store.impl.Implicits._

case class RJobLog( id: Ident
                  , jobId: Ident
                  , level: LogLevel
                  , created: Timestamp
                  , message: String)

object RJobLog {

  val table = fr"joblog"

  object Columns {
    val id = Column("id")
    val jobId = Column("jid")
    val level = Column("level")
    val created = Column("created")
    val message = Column("message")
    val all = List(id, jobId, level, created, message)
  }
  import Columns._

  def insert(v: RJobLog): ConnectionIO[Int] =
    insertRow(table, all, fr"${v.id},${v.jobId},${v.level},${v.created},${v.message}").update.run

  def findLogs(id: Ident): ConnectionIO[Vector[RJobLog]] =
    (selectSimple(all, table, jobId is id) ++ orderBy(created.asc)).query[RJobLog].to[Vector]

  def deleteAll(job: Ident): ConnectionIO[Int] =
    deleteFrom(table, jobId is job).update.run
}
@ -0,0 +1,57 @@
package docspell.store.records

import cats.effect.Sync
import cats.implicits._
import doobie._
import doobie.implicits._
import docspell.common._
import docspell.store.impl.Column
import docspell.store.impl.Implicits._

case class RNode(id: Ident, nodeType: NodeType, url: LenientUri, updated: Timestamp, created: Timestamp)

object RNode {

  def apply[F[_]: Sync](id: Ident, nodeType: NodeType, uri: LenientUri): F[RNode] =
    Timestamp.current[F].map(now => RNode(id, nodeType, uri, now, now))

  val table = fr"node"

  object Columns {
    val id = Column("id")
    val nodeType = Column("type")
    val url = Column("url")
    val updated = Column("updated")
    val created = Column("created")
    val all = List(id, nodeType, url, updated, created)
  }
  import Columns._

  def insert(v: RNode): ConnectionIO[Int] =
    insertRow(table, all, fr"${v.id},${v.nodeType},${v.url},${v.updated},${v.created}").update.run

  def update(v: RNode): ConnectionIO[Int] =
    updateRow(table, id is v.id, commas(
      nodeType setTo v.nodeType,
      url setTo v.url,
      updated setTo v.updated
    )).update.run

  def set(v: RNode): ConnectionIO[Int] =
    for {
      n <- update(v)
      k <- if (n == 0) insert(v) else 0.pure[ConnectionIO]
    } yield n + k

  def delete(appId: Ident): ConnectionIO[Int] =
    (fr"DELETE FROM" ++ table ++ where(id is appId)).update.run

  def findAll(nt: NodeType): ConnectionIO[Vector[RNode]] =
    selectSimple(all, table, nodeType is nt).query[RNode].to[Vector]

  def findById(nodeId: Ident): ConnectionIO[Option[RNode]] =
    selectSimple(all, table, id is nodeId).query[RNode].option
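Not part of the commit: a sketch of how a component might register itself on startup using RNode.set, which updates the row and falls back to an insert when it is missing. It assumes a Store[F] named store whose transact runs a ConnectionIO, as it does in JobQueue above.

import cats.effect.Sync
import cats.implicits._
import docspell.common.{Ident, LenientUri, NodeType}
import docspell.store.Store

// Hypothetical startup hook: create or refresh this node's row so peers can find its url.
def registerNode[F[_]: Sync](store: Store[F], id: Ident, nt: NodeType, uri: LenientUri): F[Int] =
  for {
    node <- RNode(id, nt, uri)
    n    <- store.transact(RNode.set(node))
  } yield n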
@ -0,0 +1,103 @@
|
||||
package docspell.store.records
|
||||
|
||||
import fs2.Stream
|
||||
import doobie._
|
||||
import doobie.implicits._
|
||||
import docspell.common.{IdRef, _}
|
||||
import docspell.store.impl._
|
||||
import docspell.store.impl.Implicits._
|
||||
|
||||
case class ROrganization(
|
||||
oid: Ident
|
||||
, cid: Ident
|
||||
, name: String
|
||||
, street: String
|
||||
, zip: String
|
||||
, city: String
|
||||
, country: String
|
||||
, notes: Option[String]
|
||||
, created: Timestamp) {
|
||||
|
||||
}
|
||||
|
||||
object ROrganization {
|
||||
|
||||
val table = fr"organization"
|
||||
|
||||
object Columns {
|
||||
val oid = Column("oid")
|
||||
val cid = Column("cid")
|
||||
val name = Column("name")
|
||||
val street = Column("street")
|
||||
val zip = Column("zip")
|
||||
val city = Column("city")
|
||||
val country = Column("country")
|
||||
val notes = Column("notes")
|
||||
val created = Column("created")
|
||||
val all = List(oid, cid, name, street, zip, city, country, notes, created)
|
||||
}
|
||||
|
||||
import Columns._
|
||||
|
||||
def insert(v: ROrganization): ConnectionIO[Int] = {
|
||||
val sql = insertRow(table, all,
|
||||
fr"${v.oid},${v.cid},${v.name},${v.street},${v.zip},${v.city},${v.country},${v.notes},${v.created}")
|
||||
sql.update.run
|
||||
}
|
||||
|
||||
def update(v: ROrganization): ConnectionIO[Int] = {
|
||||
val sql = updateRow(table, and(oid is v.oid, cid is v.cid), commas(
|
||||
cid setTo v.cid,
|
||||
name setTo v.name,
|
||||
street setTo v.street,
|
||||
zip setTo v.zip,
|
||||
city setTo v.city,
|
||||
country setTo v.country,
|
||||
notes setTo v.notes
|
||||
))
|
||||
sql.update.run
|
||||
}
|
||||
|
||||
def existsByName(coll: Ident, oname: String): ConnectionIO[Boolean] =
|
||||
selectCount(oid, table, and(cid is coll, name is oname)).query[Int].unique.map(_ > 0)
|
||||
|
||||
  def findById(id: Ident): ConnectionIO[Option[ROrganization]] = {
    val sql = selectSimple(all, table, oid is id)
    sql.query[ROrganization].option
  }
|
||||
|
||||
def find(coll: Ident, orgName: String): ConnectionIO[Option[ROrganization]] = {
|
||||
val sql = selectSimple(all, table, and(cid is coll, name is orgName))
|
||||
sql.query[ROrganization].option
|
||||
}
|
||||
|
||||
def findLike(coll: Ident, orgName: String): ConnectionIO[Vector[IdRef]] =
|
||||
selectSimple(List(oid, name), table, and(cid is coll,
|
||||
name.lowerLike(orgName))).
|
||||
query[IdRef].to[Vector]
|
||||
|
||||
def findLike(coll: Ident, contactKind: ContactKind, value: String): ConnectionIO[Vector[IdRef]] = {
|
||||
val CC = RContact.Columns
|
||||
val q = fr"SELECT DISTINCT" ++ commas(oid.prefix("o").f, name.prefix("o").f) ++
|
||||
fr"FROM" ++ table ++ fr"o" ++
|
||||
fr"INNER JOIN" ++ RContact.table ++ fr"c ON" ++ CC.orgId.prefix("c").is(oid.prefix("o")) ++
|
||||
fr"WHERE" ++ and(cid.prefix("o").is(coll)
|
||||
, CC.kind.prefix("c") is contactKind
|
||||
, CC.value.prefix("c").lowerLike(value))
|
||||
|
||||
q.query[IdRef].to[Vector]
|
||||
}
|
||||
|
||||
def findAll(coll: Ident, order: Columns.type => Column): Stream[ConnectionIO, ROrganization] = {
|
||||
val sql = selectSimple(all, table, cid is coll) ++ orderBy(order(Columns).f)
|
||||
sql.query[ROrganization].stream
|
||||
}
|
||||
|
||||
def findAllRef(coll: Ident, order: Columns.type => Column): ConnectionIO[Vector[IdRef]] = {
|
||||
val sql = selectSimple(List(oid, name), table, cid is coll) ++ orderBy(order(Columns).f)
|
||||
sql.query[IdRef].to[Vector]
|
||||
}
|
||||
|
||||
def delete(id: Ident, coll: Ident): ConnectionIO[Int] =
|
||||
deleteFrom(table, and(oid is id, cid is coll)).update.run
|
||||
}
|
@ -0,0 +1,108 @@
|
||||
package docspell.store.records
|
||||
|
||||
import fs2.Stream
|
||||
import doobie._
|
||||
import doobie.implicits._
|
||||
import docspell.common.{IdRef, _}
|
||||
import docspell.store.impl._
|
||||
import docspell.store.impl.Implicits._
|
||||
|
||||
case class RPerson(
|
||||
pid: Ident
|
||||
, cid: Ident
|
||||
, name: String
|
||||
, street: String
|
||||
, zip: String
|
||||
, city: String
|
||||
, country: String
|
||||
, notes: Option[String]
|
||||
, concerning: Boolean
|
||||
, created: Timestamp) {
|
||||
|
||||
}
|
||||
|
||||
object RPerson {
|
||||
|
||||
val table = fr"person"
|
||||
|
||||
object Columns {
|
||||
val pid = Column("pid")
|
||||
val cid = Column("cid")
|
||||
val name = Column("name")
|
||||
val street = Column("street")
|
||||
val zip = Column("zip")
|
||||
val city = Column("city")
|
||||
val country = Column("country")
|
||||
val notes = Column("notes")
|
||||
val concerning = Column("concerning")
|
||||
val created = Column("created")
|
||||
val all = List(pid, cid, name, street, zip, city, country, notes, concerning, created)
|
||||
}
|
||||
|
||||
import Columns._
|
||||
|
||||
def insert(v: RPerson): ConnectionIO[Int] = {
|
||||
val sql = insertRow(table, all,
|
||||
fr"${v.pid},${v.cid},${v.name},${v.street},${v.zip},${v.city},${v.country},${v.notes},${v.concerning},${v.created}")
|
||||
sql.update.run
|
||||
}
|
||||
|
||||
def update(v: RPerson): ConnectionIO[Int] = {
|
||||
val sql = updateRow(table, and(pid is v.pid, cid is v.cid), commas(
|
||||
cid setTo v.cid,
|
||||
name setTo v.name,
|
||||
street setTo v.street,
|
||||
zip setTo v.zip,
|
||||
city setTo v.city,
|
||||
country setTo v.country,
|
||||
concerning setTo v.concerning,
|
||||
notes setTo v.notes
|
||||
))
|
||||
sql.update.run
|
||||
}
|
||||
|
||||
def existsByName(coll: Ident, pname: String): ConnectionIO[Boolean] =
|
||||
selectCount(pid, table, and(cid is coll, name is pname)).query[Int].unique.map(_ > 0)
|
||||
|
||||
  def findById(id: Ident): ConnectionIO[Option[RPerson]] = {
    val sql = selectSimple(all, table, pid is id)
    sql.query[RPerson].option
  }
|
||||
|
||||
def find(coll: Ident, personName: String): ConnectionIO[Option[RPerson]] = {
|
||||
val sql = selectSimple(all, table, and(cid is coll, name is personName))
|
||||
sql.query[RPerson].option
|
||||
}
|
||||
|
||||
def findLike(coll: Ident, personName: String, concerningOnly: Boolean): ConnectionIO[Vector[IdRef]] =
|
||||
selectSimple(List(pid, name), table, and(cid is coll,
|
||||
concerning is concerningOnly,
|
||||
name.lowerLike(personName))).
|
||||
query[IdRef].to[Vector]
|
||||
|
||||
def findLike(coll: Ident, contactKind: ContactKind, value: String, concerningOnly: Boolean): ConnectionIO[Vector[IdRef]] = {
|
||||
val CC = RContact.Columns
|
||||
val q = fr"SELECT DISTINCT" ++ commas(pid.prefix("p").f, name.prefix("p").f) ++
|
||||
fr"FROM" ++ table ++ fr"p" ++
|
||||
fr"INNER JOIN" ++ RContact.table ++ fr"c ON" ++ CC.personId.prefix("c").is(pid.prefix("p")) ++
|
||||
fr"WHERE" ++ and(cid.prefix("p").is(coll)
|
||||
, CC.kind.prefix("c") is contactKind
|
||||
, concerning.prefix("p") is concerningOnly
|
||||
, CC.value.prefix("c").lowerLike(value))
|
||||
|
||||
q.query[IdRef].to[Vector]
|
||||
}
|
||||
|
||||
def findAll(coll: Ident, order: Columns.type => Column): Stream[ConnectionIO, RPerson] = {
|
||||
val sql = selectSimple(all, table, cid is coll) ++ orderBy(order(Columns).f)
|
||||
sql.query[RPerson].stream
|
||||
}
|
||||
|
||||
def findAllRef(coll: Ident, order: Columns.type => Column): ConnectionIO[Vector[IdRef]] = {
|
||||
val sql = selectSimple(List(pid, name), table, cid is coll) ++ orderBy(order(Columns).f)
|
||||
sql.query[IdRef].to[Vector]
|
||||
}
|
||||
|
||||
def delete(personId: Ident, coll: Ident): ConnectionIO[Int] =
|
||||
deleteFrom(table, and(pid is personId, cid is coll)).update.run
|
||||
}
|
@ -0,0 +1,87 @@
|
||||
package docspell.store.records
|
||||
|
||||
import doobie._
|
||||
import doobie.implicits._
|
||||
import docspell.common._
|
||||
import docspell.store.impl._
|
||||
import docspell.store.impl.Implicits._
|
||||
|
||||
case class RSource(
|
||||
sid: Ident
|
||||
, cid: Ident
|
||||
, abbrev: String
|
||||
, description: Option[String]
|
||||
, counter: Int
|
||||
, enabled: Boolean
|
||||
, priority: Priority
|
||||
, created: Timestamp) {
|
||||
|
||||
}
|
||||
|
||||
object RSource {
|
||||
|
||||
val table = fr"source"
|
||||
|
||||
object Columns {
|
||||
|
||||
val sid = Column("sid")
|
||||
val cid = Column("cid")
|
||||
val abbrev = Column("abbrev")
|
||||
val description = Column("description")
|
||||
val counter = Column("counter")
|
||||
val enabled = Column("enabled")
|
||||
val priority = Column("priority")
|
||||
val created = Column("created")
|
||||
|
||||
val all = List(sid,cid,abbrev,description,counter,enabled,priority,created)
|
||||
}
|
||||
|
||||
import Columns._
|
||||
|
||||
def insert(v: RSource): ConnectionIO[Int] = {
|
||||
val sql = insertRow(table, all,
|
||||
fr"${v.sid},${v.cid},${v.abbrev},${v.description},${v.counter},${v.enabled},${v.priority},${v.created}")
|
||||
sql.update.run
|
||||
}
|
||||
|
||||
def updateNoCounter(v: RSource): ConnectionIO[Int] = {
|
||||
val sql = updateRow(table, and(sid is v.sid, cid is v.cid), commas(
|
||||
cid setTo v.cid,
|
||||
abbrev setTo v.abbrev,
|
||||
description setTo v.description,
|
||||
enabled setTo v.enabled,
|
||||
priority setTo v.priority
|
||||
))
|
||||
sql.update.run
|
||||
}
|
||||
|
||||
def incrementCounter(source: String, coll: Ident): ConnectionIO[Int] =
|
||||
updateRow(table, and(abbrev is source, cid is coll), counter.f ++ fr"=" ++ counter.f ++ fr"+ 1").update.run
|
||||
|
||||
def existsById(id: Ident): ConnectionIO[Boolean] = {
|
||||
val sql = selectCount(sid, table, sid is id)
|
||||
sql.query[Int].unique.map(_ > 0)
|
||||
}
|
||||
|
||||
def existsByAbbrev(coll: Ident, abb: String): ConnectionIO[Boolean] = {
|
||||
val sql = selectCount(sid, table, and(cid is coll, abbrev is abb))
|
||||
sql.query[Int].unique.map(_ > 0)
|
||||
}
|
||||
|
||||
|
||||
def find(id: Ident): ConnectionIO[Option[RSource]] = {
|
||||
val sql = selectSimple(all, table, sid is id)
|
||||
sql.query[RSource].option
|
||||
}
|
||||
|
||||
def findCollective(sourceId: Ident): ConnectionIO[Option[Ident]] =
|
||||
selectSimple(List(cid), table, sid is sourceId).query[Ident].option
|
||||
|
||||
def findAll(coll: Ident, order: Columns.type => Column): ConnectionIO[Vector[RSource]] = {
|
||||
val sql = selectSimple(all, table, cid is coll) ++ orderBy(order(Columns).f)
|
||||
sql.query[RSource].to[Vector]
|
||||
}
|
||||
|
||||
def delete(sourceId: Ident, coll: Ident): ConnectionIO[Int] =
|
||||
deleteFrom(table, and(sid is sourceId, cid is coll)).update.run
|
||||
}
|
@ -0,0 +1,76 @@
|
||||
package docspell.store.records
|
||||
|
||||
import doobie._, doobie.implicits._
|
||||
import docspell.common._
|
||||
import docspell.store.impl._
|
||||
import docspell.store.impl.Implicits._
|
||||
|
||||
case class RTag(
|
||||
tagId: Ident
|
||||
, collective: Ident
|
||||
, name: String
|
||||
, category: Option[String]
|
||||
, created: Timestamp) {
|
||||
|
||||
}
|
||||
|
||||
object RTag {
|
||||
|
||||
val table = fr"tag"
|
||||
|
||||
object Columns {
|
||||
val tid = Column("tid")
|
||||
val cid = Column("cid")
|
||||
val name = Column("name")
|
||||
val category = Column("category")
|
||||
val created = Column("created")
|
||||
val all = List(tid,cid,name,category,created)
|
||||
}
|
||||
import Columns._
|
||||
|
||||
def insert(v: RTag): ConnectionIO[Int] = {
|
||||
val sql = insertRow(table, all,
|
||||
fr"${v.tagId},${v.collective},${v.name},${v.category},${v.created}")
|
||||
sql.update.run
|
||||
}
|
||||
|
||||
def update(v: RTag): ConnectionIO[Int] = {
|
||||
val sql = updateRow(table, and(tid is v.tagId, cid is v.collective), commas(
|
||||
cid setTo v.collective,
|
||||
name setTo v.name,
|
||||
category setTo v.category
|
||||
))
|
||||
sql.update.run
|
||||
}
|
||||
|
||||
def findById(id: Ident): ConnectionIO[Option[RTag]] = {
|
||||
val sql = selectSimple(all, table, tid is id)
|
||||
sql.query[RTag].option
|
||||
}
|
||||
|
||||
def findByIdAndCollective(id: Ident, coll: Ident): ConnectionIO[Option[RTag]] = {
|
||||
val sql = selectSimple(all, table, and(tid is id, cid is coll))
|
||||
sql.query[RTag].option
|
||||
}
|
||||
|
||||
def existsByName(tag: RTag): ConnectionIO[Boolean] = {
|
||||
val sql = selectCount(tid, table, and(cid is tag.collective, name is tag.name, category is tag.category))
|
||||
sql.query[Int].unique.map(_ > 0)
|
||||
}
|
||||
|
||||
def findAll(coll: Ident, order: Columns.type => Column): ConnectionIO[Vector[RTag]] = {
|
||||
val sql = selectSimple(all, table, cid is coll) ++ orderBy(order(Columns).f)
|
||||
sql.query[RTag].to[Vector]
|
||||
}
|
||||
|
||||
def findByItem(itemId: Ident): ConnectionIO[Vector[RTag]] = {
|
||||
val rcol = all.map(_.prefix("t"))
|
||||
(selectSimple(rcol, table ++ fr"t," ++ RTagItem.table ++ fr"i", and(
|
||||
RTagItem.Columns.itemId.prefix("i") is itemId,
|
||||
RTagItem.Columns.tagId.prefix("i").is(tid.prefix("t"))
|
||||
)) ++ orderBy(name.prefix("t").asc)).query[RTag].to[Vector]
|
||||
}
|
||||
|
||||
def delete(tagId: Ident, coll: Ident): ConnectionIO[Int] =
|
||||
deleteFrom(table, and(tid is tagId, cid is coll)).update.run
|
||||
}
|
@ -0,0 +1,42 @@
package docspell.store.records

import cats.implicits._
import doobie._
import doobie.implicits._
import docspell.common._
import docspell.store.impl._
import docspell.store.impl.Implicits._

case class RTagItem(tagItemId: Ident, itemId: Ident, tagId: Ident)

object RTagItem {

  val table = fr"tagitem"

  object Columns {
    val tagItemId = Column("tagitemid")
    val itemId = Column("itemid")
    val tagId = Column("tid")
    val all = List(tagItemId, itemId, tagId)
  }
  import Columns._

  def insert(v: RTagItem): ConnectionIO[Int] =
    insertRow(table, all, fr"${v.tagItemId},${v.itemId},${v.tagId}").update.run

  def deleteItemTags(item: Ident): ConnectionIO[Int] =
    deleteFrom(table, itemId is item).update.run

  def deleteTag(tid: Ident): ConnectionIO[Int] =
    deleteFrom(table, tagId is tid).update.run

  def insertItemTags(item: Ident, tags: Seq[Ident]): ConnectionIO[Int] =
    for {
      tagValues <- tags.toList.traverse(id => Ident.randomId[ConnectionIO].map(rid => RTagItem(rid, item, id)))
      tagFrag = tagValues.map(v => fr"${v.tagItemId},${v.itemId},${v.tagId}")
      ins <- insertRows(table, all, tagFrag).update.run
    } yield ins
}
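Not part of the commit: a short sketch of replacing an item's tags by clearing the link table and re-inserting the new set in one batch. Both steps compose in a single ConnectionIO, so they can be run in one transaction; the helper name is illustrative.

import doobie.ConnectionIO
import docspell.common.Ident

// Hypothetical helper: wipe existing links for the item, then insert the new tag ids.
def replaceTags(item: Ident, tags: Seq[Ident]): ConnectionIO[Int] =
  for {
    _ <- RTagItem.deleteItemTags(item)
    n <- RTagItem.insertItemTags(item, tags)
  } yield n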
@ -0,0 +1,99 @@
|
||||
package docspell.store.records
|
||||
|
||||
import doobie._, doobie.implicits._
|
||||
import docspell.common._
|
||||
import docspell.store.impl._
|
||||
import docspell.store.impl.Implicits._
|
||||
|
||||
case class RUser(
|
||||
uid: Ident
|
||||
, login: Ident
|
||||
, cid: Ident
|
||||
, password: Password
|
||||
, state: UserState
|
||||
, email: Option[String]
|
||||
, loginCount: Int
|
||||
, lastLogin: Option[Timestamp]
|
||||
, created: Timestamp) {
|
||||
|
||||
}
|
||||
|
||||
object RUser {
|
||||
|
||||
val table = fr"user_"
|
||||
|
||||
object Columns {
|
||||
val uid = Column("uid")
|
||||
val cid = Column("cid")
|
||||
val login = Column("login")
|
||||
val password = Column("password")
|
||||
val state = Column("state")
|
||||
val email = Column("email")
|
||||
val loginCount = Column("logincount")
|
||||
val lastLogin = Column("lastlogin")
|
||||
val created = Column("created")
|
||||
|
||||
val all = List(uid
|
||||
,login
|
||||
,cid
|
||||
,password
|
||||
,state
|
||||
,email
|
||||
,loginCount
|
||||
,lastLogin
|
||||
,created)
|
||||
}
|
||||
|
||||
import Columns._
|
||||
|
||||
def insert(v: RUser): ConnectionIO[Int] = {
|
||||
val sql = insertRow(table, Columns.all,
|
||||
fr"${v.uid},${v.login},${v.cid},${v.password},${v.state},${v.email},${v.loginCount},${v.lastLogin},${v.created}")
|
||||
sql.update.run
|
||||
}
|
||||
|
||||
def update(v: RUser): ConnectionIO[Int] = {
|
||||
val sql = updateRow(table, and(login is v.login, cid is v.cid), commas(
|
||||
state setTo v.state,
|
||||
email setTo v.email,
|
||||
loginCount setTo v.loginCount,
|
||||
lastLogin setTo v.lastLogin
|
||||
))
|
||||
sql.update.run
|
||||
}
|
||||
|
||||
def exists(loginName: Ident): ConnectionIO[Boolean] = {
|
||||
selectCount(uid, table, login is loginName).query[Int].unique.map(_ > 0)
|
||||
}
|
||||
|
||||
def findByAccount(aid: AccountId): ConnectionIO[Option[RUser]] = {
|
||||
val sql = selectSimple(all, table, and(cid is aid.collective, login is aid.user))
|
||||
sql.query[RUser].option
|
||||
}
|
||||
|
||||
def findById(userId: Ident): ConnectionIO[Option[RUser]] = {
|
||||
val sql = selectSimple(all, table, uid is userId)
|
||||
sql.query[RUser].option
|
||||
}
|
||||
|
||||
def findAll(coll: Ident, order: Columns.type => Column): ConnectionIO[Vector[RUser]] = {
|
||||
val sql = selectSimple(all, table, cid is coll) ++ orderBy(order(Columns).f)
|
||||
sql.query[RUser].to[Vector]
|
||||
}
|
||||
|
||||
def updateLogin(accountId: AccountId): ConnectionIO[Int] =
|
||||
currentTime.flatMap(t => updateRow(table
|
||||
, and(cid is accountId.collective, login is accountId.user)
|
||||
, commas(
|
||||
loginCount.f ++ fr"=" ++ loginCount.f ++ fr"+ 1",
|
||||
lastLogin setTo t
|
||||
)).update.run)
|
||||
|
||||
def updatePassword(accountId: AccountId, hashedPass: Password): ConnectionIO[Int] =
|
||||
updateRow(table
|
||||
, and(cid is accountId.collective, login is accountId.user)
|
||||
, password setTo hashedPass).update.run
|
||||
|
||||
def delete(user: Ident, coll: Ident): ConnectionIO[Int] =
|
||||
deleteFrom(table, and(cid is coll, login is user)).update.run
|
||||
}
|