mirror of https://github.com/TheAnachronism/docspell.git
synced 2025-04-05 19:09:32 +00:00

Merge pull request #1579 from eikek/fix/item-mixed-search

    Fix/item mixed search

commit 7d6e2d8459

build.sbt: 23 changes
@@ -471,6 +471,17 @@ val addonlib = project
   )
   .dependsOn(common, files, loggingScribe)
 
+val ftsclient = project
+  .in(file("modules/fts-client"))
+  .disablePlugins(RevolverPlugin)
+  .settings(sharedSettings)
+  .withTestSettings
+  .settings(
+    name := "docspell-fts-client",
+    libraryDependencies ++= Seq.empty
+  )
+  .dependsOn(common, loggingScribe)
+
 val store = project
   .in(file("modules/store"))
   .disablePlugins(RevolverPlugin)

@@ -500,6 +511,7 @@ val store = project
     files,
     notificationApi,
     jsonminiq,
+    ftsclient,
     loggingScribe
   )
 
@@ -623,17 +635,6 @@ val analysis = project
   )
   .dependsOn(common, files % "test->test", loggingScribe)
 
-val ftsclient = project
-  .in(file("modules/fts-client"))
-  .disablePlugins(RevolverPlugin)
-  .settings(sharedSettings)
-  .withTestSettings
-  .settings(
-    name := "docspell-fts-client",
-    libraryDependencies ++= Seq.empty
-  )
-  .dependsOn(common, loggingScribe)
-
 val ftssolr = project
   .in(file("modules/fts-solr"))
   .disablePlugins(RevolverPlugin)
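Note on the build change: the `ftsclient` module definition moves above `store` because `store` now lists it in `dependsOn`, so the definition is placed before its first use. A minimal sketch of the new dependency edge (module names from this diff):

    // store gains an edge on ftsclient so fulltext results can be joined in SQL
    val ftsclient = project // defined before store references it
    val store = project
      .dependsOn(common, files, notificationApi, jsonminiq, ftsclient, loggingScribe)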
@@ -20,7 +20,7 @@ class AddonExecutorTest extends CatsEffectSuite with Fixtures with TestLoggingCo
   val logger = docspell.logging.getLogger[IO]
 
   override def docspellLogConfig =
-    super.docspellLogConfig.copy(minimumLevel = Level.Trace)
+    super.docspellLogConfig.copy(minimumLevel = Level.Error)
 
   tempDir.test("select docker if Dockerfile exists") { dir =>
     for {
@@ -12,6 +12,7 @@ import docspell.backend.BackendCommands.EventContext
 import docspell.backend.auth.Login
 import docspell.backend.fulltext.CreateIndex
 import docspell.backend.ops._
+import docspell.backend.ops.search.OSearch
 import docspell.backend.signup.OSignup
 import docspell.common.bc.BackendCommandRunner
 import docspell.ftsclient.FtsClient

@@ -58,6 +59,7 @@ trait BackendApp[F[_]] {
   def itemLink: OItemLink[F]
   def downloadAll: ODownloadAll[F]
   def addons: OAddons[F]
+  def search: OSearch[F]
 
   def commands(eventContext: Option[EventContext]): BackendCommandRunner[F, Unit]
 }

@@ -130,6 +132,7 @@ object BackendApp {
         joexImpl
       )
     )
+    searchImpl <- Resource.pure(OSearch(store, ftsClient))
   } yield new BackendApp[F] {
     val pubSub = pubSubT
     val login = loginImpl

@@ -162,6 +165,7 @@ object BackendApp {
     val downloadAll = downloadAllImpl
     val addons = addonsImpl
     val attachment = attachImpl
+    val search = searchImpl
 
     def commands(eventContext: Option[EventContext]) =
       BackendCommands.fromBackend(this, eventContext)
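With `search: OSearch[F]` on the `BackendApp` trait, callers reach the combined search through the backend handle. A hedged sketch of a call site; the `query`/`ftq` values are hypothetical, `searchSummary` is the method the new OSearch file (below) defines:

    // Sketch only: run the summary part of a search via the new backend member.
    for {
      today   <- Timestamp.current[F].map(_.toUtcDate)
      summary <- backend.search.searchSummary(today)(query, ftq)
    } yield summary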
@@ -181,7 +181,7 @@ object OFulltext {
         q = Query
           .all(account)
           .withFix(_.copy(query = itemIdsQuery.some))
-        res <- store.transact(QItem.searchStats(now.toUtcDate)(q))
+        res <- store.transact(QItem.searchStats(now.toUtcDate, None)(q))
       } yield res
     }
 
@@ -242,7 +242,7 @@ object OFulltext {
           .getOrElse(Attr.ItemId.notExists)
         qnext = q.withFix(_.copy(query = itemIdsQuery.some))
         now <- Timestamp.current[F]
-        res <- store.transact(QItem.searchStats(now.toUtcDate)(qnext))
+        res <- store.transact(QItem.searchStats(now.toUtcDate, None)(qnext))
       } yield res
 
       // Helper

@@ -180,7 +180,7 @@ object OItemSearch {
       Timestamp
         .current[F]
         .map(_.toUtcDate)
-        .flatMap(today => store.transact(QItem.searchStats(today)(q)))
+        .flatMap(today => store.transact(QItem.searchStats(today, None)(q)))
 
     def findAttachment(id: Ident, collective: Ident): F[Option[AttachmentData[F]]] =
       store
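All three call sites pass `None` for the new second parameter of `QItem.searchStats`, keeping the pure-SQL behavior. That parameter is the optional temporary table of fulltext hits that `OSearch` (new file below) supplies via `tt.some`. An assumed shape of the changed signature, inferred from these call sites; the parameter name is hypothetical:

    // Assumption: the Option carries the temp table of fulltext results.
    def searchStats(today: LocalDate, ftsTable: Option[RFtsResult.Table])(
        q: Query
    ): ConnectionIO[SearchSummary]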
@@ -0,0 +1,244 @@
+/*
+ * Copyright 2020 Eike K. & Contributors
+ *
+ * SPDX-License-Identifier: AGPL-3.0-or-later
+ */
+
+package docspell.backend.ops.search
+
+import java.time.LocalDate
+
+import cats.effect._
+import cats.syntax.all._
+import cats.{Functor, ~>}
+import fs2.Stream
+
+import docspell.backend.ops.OItemSearch.{ListItemWithTags, SearchSummary}
+import docspell.common.{AccountId, Duration, SearchMode}
+import docspell.ftsclient.{FtsClient, FtsQuery}
+import docspell.query.{FulltextExtract, ItemQuery, ItemQueryParser}
+import docspell.store.Store
+import docspell.store.fts.RFtsResult
+import docspell.store.qb.Batch
+import docspell.store.queries._
+
+import doobie.{ConnectionIO, WeakAsync}
+
+/** Combine fulltext search and sql search into one operation.
+  *
+  * To allow for paging the results from fulltext search are brought into the sql database
+  * by creating a temporary table.
+  */
+trait OSearch[F[_]] {
+
+  /** Searches at sql database with the given query joining it optionally with results
+    * from fulltext search. Any "fulltext search" query node is discarded. It is assumed
+    * that the fulltext search node has been extracted into the argument.
+    */
+  def search(maxNoteLen: Int, today: LocalDate, batch: Batch)(
+      q: Query,
+      fulltextQuery: Option[String]
+  ): F[Vector[ListItem]]
+
+  /** Same as `search` above, but runs additionally queries per item (!) to retrieve more
+    * details.
+    */
+  def searchWithDetails(
+      maxNoteLen: Int,
+      today: LocalDate,
+      batch: Batch
+  )(
+      q: Query,
+      fulltextQuery: Option[String]
+  ): F[Vector[ListItemWithTags]]
+
+  /** Selects either `search` or `searchWithDetails`. For the former the items are filled
+    * with empty details.
+    */
+  final def searchSelect(
+      withDetails: Boolean,
+      maxNoteLen: Int,
+      today: LocalDate,
+      batch: Batch
+  )(
+      q: Query,
+      fulltextQuery: Option[String]
+  )(implicit F: Functor[F]): F[Vector[ListItemWithTags]] =
+    if (withDetails) searchWithDetails(maxNoteLen, today, batch)(q, fulltextQuery)
+    else search(maxNoteLen, today, batch)(q, fulltextQuery).map(_.map(_.toWithTags))
+
+  /** Run multiple database calls with the give query to collect a summary. */
+  def searchSummary(
+      today: LocalDate
+  )(q: Query, fulltextQuery: Option[String]): F[SearchSummary]
+
+  /** Parses a query string and creates a `Query` object, to be used with the other
+    * methods. The query object contains the parsed query amended with more conditions to
+    * restrict to valid items only (as specified with `mode`).
+    */
+  def parseQueryString(
+      accountId: AccountId,
+      mode: SearchMode,
+      qs: String
+  ): QueryParseResult
+}
+
+object OSearch {
+  def apply[F[_]: Async](
+      store: Store[F],
+      ftsClient: FtsClient[F]
+  ): OSearch[F] =
+    new OSearch[F] {
+      private[this] val logger = docspell.logging.getLogger[F]
+
+      def parseQueryString(
+          accountId: AccountId,
+          mode: SearchMode,
+          qs: String
+      ): QueryParseResult = {
+        val validItemQuery =
+          mode match {
+            case SearchMode.Trashed => ItemQuery.Expr.Trashed
+            case SearchMode.Normal  => ItemQuery.Expr.ValidItemStates
+            case SearchMode.All     => ItemQuery.Expr.ValidItemsOrTrashed
+          }
+
+        if (qs.trim.isEmpty) {
+          val qf = Query.Fix(accountId, Some(validItemQuery), None)
+          val qq = Query.QueryExpr(None)
+          val q = Query(qf, qq)
+          QueryParseResult.Success(q, None)
+        } else
+          ItemQueryParser.parse(qs) match {
+            case Right(iq) =>
+              FulltextExtract.findFulltext(iq.expr) match {
+                case FulltextExtract.Result.SuccessNoFulltext(expr) =>
+                  val qf = Query.Fix(accountId, Some(validItemQuery), None)
+                  val qq = Query.QueryExpr(expr)
+                  val q = Query(qf, qq)
+                  QueryParseResult.Success(q, None)
+
+                case FulltextExtract.Result.SuccessNoExpr(fts) =>
+                  val qf = Query.Fix(accountId, Some(validItemQuery), Option(_.byScore))
+                  val qq = Query.QueryExpr(None)
+                  val q = Query(qf, qq)
+                  QueryParseResult.Success(q, Some(fts))
+
+                case FulltextExtract.Result.SuccessBoth(expr, fts) =>
+                  val qf = Query.Fix(accountId, Some(validItemQuery), None)
+                  val qq = Query.QueryExpr(expr)
+                  val q = Query(qf, qq)
+                  QueryParseResult.Success(q, Some(fts))
+
+                case f: FulltextExtract.FailureResult =>
+                  QueryParseResult.FulltextMismatch(f)
+              }
+
+            case Left(err) =>
+              QueryParseResult.ParseFailed(err).cast
+          }
+      }
+
+      def search(maxNoteLen: Int, today: LocalDate, batch: Batch)(
+          q: Query,
+          fulltextQuery: Option[String]
+      ): F[Vector[ListItem]] =
+        fulltextQuery match {
+          case Some(ftq) =>
+            for {
+              timed <- Duration.stopTime[F]
+              ftq <- createFtsQuery(q.fix.account, ftq)
+
+              results <- WeakAsync.liftK[F, ConnectionIO].use { nat =>
+                val tempTable = temporaryFtsTable(ftq, nat)
+                store
+                  .transact(
+                    Stream
+                      .eval(tempTable)
+                      .flatMap(tt =>
+                        QItem.queryItems(q, today, maxNoteLen, batch, tt.some)
+                      )
+                  )
+                  .compile
+                  .toVector
+              }
+              duration <- timed
+              _ <- logger.debug(s"Simple search with fts in: ${duration.formatExact}")
+            } yield results
+
+          case None =>
+            for {
+              timed <- Duration.stopTime[F]
+              results <- store
+                .transact(QItem.queryItems(q, today, maxNoteLen, batch, None))
+                .compile
+                .toVector
+              duration <- timed
+              _ <- logger.debug(s"Simple search sql in: ${duration.formatExact}")
+            } yield results
+        }
+
+      def searchWithDetails(
+          maxNoteLen: Int,
+          today: LocalDate,
+          batch: Batch
+      )(
+          q: Query,
+          fulltextQuery: Option[String]
+      ): F[Vector[ListItemWithTags]] =
+        for {
+          items <- search(maxNoteLen, today, batch)(q, fulltextQuery)
+          timed <- Duration.stopTime[F]
+          resolved <- store
+            .transact(
+              QItem.findItemsWithTags(q.fix.account.collective, Stream.emits(items))
+            )
+            .compile
+            .toVector
+          duration <- timed
+          _ <- logger.debug(s"Search: resolved details in: ${duration.formatExact}")
+        } yield resolved
+
+      def searchSummary(
+          today: LocalDate
+      )(q: Query, fulltextQuery: Option[String]): F[SearchSummary] =
+        fulltextQuery match {
+          case Some(ftq) =>
+            for {
+              ftq <- createFtsQuery(q.fix.account, ftq)
+              results <- WeakAsync.liftK[F, ConnectionIO].use { nat =>
+                val tempTable = temporaryFtsTable(ftq, nat)
+                store.transact(
+                  tempTable.flatMap(tt => QItem.searchStats(today, tt.some)(q))
+                )
+              }
+            } yield results
+
+          case None =>
+            store.transact(QItem.searchStats(today, None)(q))
+        }
+
+      private def createFtsQuery(
+          account: AccountId,
+          ftq: String
+      ): F[FtsQuery] =
+        store
+          .transact(QFolder.getMemberFolders(account))
+          .map(folders =>
+            FtsQuery(ftq, account.collective, 500, 0)
+              .withFolders(folders)
+          )
+
+      def temporaryFtsTable(
+          ftq: FtsQuery,
+          nat: F ~> ConnectionIO
+      ): ConnectionIO[RFtsResult.Table] =
+        ftsClient
+          .searchAll(ftq)
+          .translate(nat)
+          .through(RFtsResult.prepareTable(store.dbms, "fts_result"))
+          .compile
+          .lastOrError
+    }
+}
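A hedged usage sketch of the new trait: parse first, then feed the extracted fulltext part back in. The instance and all argument values here are hypothetical:

    // Sketch only: the two-step flow OSearch expects. parseQueryString splits
    // the item query from any fulltext node; search then joins fulltext hits
    // through the temporary table when a fulltext part is present.
    val osearch: OSearch[IO] = OSearch(store, ftsClient)
    osearch.parseQueryString(accountId, SearchMode.Normal, "tag:invoice") match {
      case QueryParseResult.Success(q, ftq) =>
        // ftq is Some(...) only if the query contained a fulltext node
        osearch.searchSelect(
          withDetails = true,
          maxNoteLen = 180,
          today = today,
          batch = Batch(0, 20)
        )(q, ftq)
      case failure =>
        IO.raiseError[Vector[ListItemWithTags]](
          new IllegalArgumentException(failure.toString)
        )
    }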
@@ -0,0 +1,39 @@
+/*
+ * Copyright 2020 Eike K. & Contributors
+ *
+ * SPDX-License-Identifier: AGPL-3.0-or-later
+ */
+
+package docspell.backend.ops.search
+
+import docspell.query.{FulltextExtract, ParseFailure}
+import docspell.store.queries.Query
+
+sealed trait QueryParseResult {
+  def cast: QueryParseResult = this
+
+  def get: Option[(Query, Option[String])]
+  def isSuccess: Boolean = get.isDefined
+  def isFailure: Boolean = !isSuccess
+}
+
+object QueryParseResult {
+
+  final case class Success(q: Query, ftq: Option[String]) extends QueryParseResult {
+
+    /** Drop the fulltext search query if disabled. */
+    def withFtsEnabled(enabled: Boolean) =
+      if (enabled || ftq.isEmpty) this else copy(ftq = None)
+
+    val get = Some(q -> ftq)
+  }
+
+  final case class ParseFailed(error: ParseFailure) extends QueryParseResult {
+    val get = None
+  }
+
+  final case class FulltextMismatch(error: FulltextExtract.FailureResult)
+      extends QueryParseResult {
+    val get = None
+  }
+}
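A short sketch of consuming the new ADT; the handler name is hypothetical:

    // Sketch only: every case a caller has to handle.
    def describe(r: QueryParseResult): String =
      r match {
        case QueryParseResult.Success(_, Some(ftq)) => s"sql + fulltext: $ftq"
        case QueryParseResult.Success(_, None)      => "sql only"
        case QueryParseResult.ParseFailed(err)      => s"query did not parse: $err"
        case QueryParseResult.FulltextMismatch(f)   => s"fulltext not allowed here: $f"
      }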
@@ -80,4 +80,7 @@ object Ident {
 
   implicit val order: Order[Ident] =
     Order.by(_.id)
+
+  implicit val ordering: Ordering[Ident] =
+    Ordering.by(_.id)
 }
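The standard-library `Ordering` complements the existing cats `Order`, so `Ident` values work with plain collection methods. A tiny sketch (`Ident.unsafe` is assumed as the usual test constructor):

    // Sketch only: Ordering[Ident] enables scala.collection sorting directly.
    val ids = List(Ident.unsafe("b"), Ident.unsafe("a"))
    ids.sorted // sorts by the underlying id string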
@@ -37,6 +37,8 @@ final case class FtsQuery(
 }
 
 object FtsQuery {
+  def apply(q: String, collective: Ident, limit: Int, offset: Int): FtsQuery =
+    FtsQuery(q, collective, Set.empty, Set.empty, limit, offset, HighlightSetting.default)
 
   case class HighlightSetting(pre: String, post: String)
 
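The new overload defaults the folder and item filters to empty sets and highlighting to its default. A sketch of the intended use, mirroring `createFtsQuery` in OSearch above; `collective` and `folders` are hypothetical values:

    // Sketch only: equivalent to spelling out Set.empty twice plus the default
    // highlight setting; 500/0 are the paging values OSearch uses.
    val q = FtsQuery("invoice", collective, 500, 0).withFolders(folders)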
@@ -27,7 +27,7 @@ class MigrationTest
     PostgreSQLContainer.Def(DockerImageName.parse("postgres:14"))
 
   override def docspellLogConfig: LogConfig =
-    super.docspellLogConfig.docspellLevel(Level.Debug)
+    super.docspellLogConfig.docspellLevel(Level.Error)
 
   test("create schema") {
     withContainers { cnt =>

@@ -32,7 +32,7 @@ class PsqlFtsClientTest
   private val table = FtsRepository.table
 
   override def docspellLogConfig: LogConfig =
-    super.docspellLogConfig.docspellLevel(Level.Debug)
+    super.docspellLogConfig.docspellLevel(Level.Error)
 
   test("insert data into index") {
     withContainers { cnt =>
@@ -11,6 +11,7 @@ import cats.effect.Async
 import docspell.config.Implicits._
 import docspell.config.{ConfigFactory, FtsType, Validation}
 import docspell.scheduler.CountingScheme
+import docspell.store.Db
 
 import emil.MailAddress
 import emil.javamail.syntax._

@@ -59,7 +60,7 @@ object ConfigFile {
       cfg.fullTextSearch.enabled &&
         cfg.fullTextSearch.backend == FtsType.PostgreSQL &&
         cfg.fullTextSearch.postgresql.useDefaultConnection &&
-        !cfg.jdbc.dbmsName.contains("postgresql"),
+        cfg.jdbc.dbms != Db.PostgreSQL,
       s"PostgreSQL defined fulltext search backend with default-connection, which is not a PostgreSQL connection!"
     )
   )
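Both config validations (here and in the restserver module below) switch from substring-matching the DBMS name to comparing the typed `Db` value from the JDBC config. A sketch of why the typed check is sturdier; the values are hypothetical:

    // Sketch only: the string test depends on the exact spelling of the name,
    // the enum comparison does not.
    "PostgreSQL".contains("postgresql") // false: substring test is case-sensitive
    cfg.jdbc.dbms != Db.PostgreSQL      // exact, independent of name formatting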
@@ -79,7 +79,7 @@ object BasicData {
         Query.Fix(
           account,
           Some(ItemQuery.Attr.ItemId.in(itemIds.map(_.id))),
-          Some(_.created)
+          Some(_.byItemColumnAsc(_.created))
         )
       )
       for {
@@ -3028,40 +3028,6 @@ paths:
           schema:
             $ref: "#/components/schemas/ItemLightList"
 
-  /sec/item/searchIndex:
-    post:
-      operationId: "sec-item-search-index"
-      tags: [ Item Search ]
-      summary: Search for items using full-text search only.
-      description: |
-        Search for items by only using the full-text search index.
-
-        Unlike the other search routes, this one only asks the
-        full-text search index and returns only one group that
-        contains the results in the same order as given from the
-        index. Most full-text search engines use an ordering that
-        reflect the relevance wrt the search term.
-
-        The other search routes always order the results by some
-        property (the item date) and thus the relevance ordering is
-        destroyed when using the full-text search.
-      security:
-        - authTokenHeader: []
-      requestBody:
-        content:
-          application/json:
-            schema:
-              $ref: "#/components/schemas/ItemQuery"
-      responses:
-        422:
-          description: BadRequest
-        200:
-          description: Ok
-          content:
-            application/json:
-              schema:
-                $ref: "#/components/schemas/ItemLightList"
-
   /sec/item/searchStats:
     post:
       operationId: "sec-item-search-stats-get"
@@ -70,6 +70,8 @@ docspell.server {
   # In order to keep this low, a limit can be defined here.
   max-note-length = 180
 
+  feature-search-2 = true
+
 
   # This defines whether the classification form in the collective
   # settings is displayed or not. If all joex instances have document
@@ -34,6 +34,7 @@ case class Config(
     integrationEndpoint: Config.IntegrationEndpoint,
     maxItemPageSize: Int,
     maxNoteLength: Int,
+    featureSearch2: Boolean,
    fullTextSearch: Config.FullTextSearch,
    adminEndpoint: Config.AdminEndpoint,
    openid: List[OpenIdConfig],
@@ -16,6 +16,7 @@ import docspell.config.Implicits._
 import docspell.config.{ConfigFactory, FtsType, Validation}
 import docspell.oidc.{ProviderConfig, SignatureAlgo}
 import docspell.restserver.auth.OpenId
+import docspell.store.Db
 
 import pureconfig._
 import pureconfig.generic.auto._

@@ -113,7 +114,7 @@ object ConfigFile {
       cfg.fullTextSearch.enabled &&
         cfg.fullTextSearch.backend == FtsType.PostgreSQL &&
         cfg.fullTextSearch.postgresql.useDefaultConnection &&
-        !cfg.backend.jdbc.dbmsName.contains("postgresql"),
+        cfg.backend.jdbc.dbms != Db.PostgreSQL,
       s"PostgreSQL defined fulltext search backend with default-connection, which is not a PostgreSQL connection!"
     )
   )
@@ -35,7 +35,10 @@ import org.http4s.server.websocket.WebSocketBuilder2
 
 object RestServer {
 
-  def serve[F[_]: Async](cfg: Config, pools: Pools): F[ExitCode] =
+  def serve[F[_]: Async](
+      cfg: Config,
+      pools: Pools
+  ): F[ExitCode] =
     for {
       wsTopic <- Topic[F, OutputEvent]
       keepAlive = Stream

@@ -102,7 +105,8 @@ object RestServer {
         cfg.auth.serverSecret.some
       )
 
-      restApp <- RestAppImpl.create[F](cfg, pools, store, httpClient, pubSub, wsTopic)
+      restApp <- RestAppImpl
+        .create[F](cfg, pools, store, httpClient, pubSub, wsTopic)
     } yield (restApp, pubSub, setting)
 
   def createHttpApp[F[_]: Async](
@@ -297,7 +297,7 @@ trait Conversions {
       relatedItems = i.relatedItems
     )
 
-  private def mkAttachmentLight(qa: QAttachmentLight): AttachmentLight =
+  def mkAttachmentLight(qa: QAttachmentLight): AttachmentLight =
     AttachmentLight(qa.id, qa.position, qa.name, qa.pageCount)
 
   def mkItemLightWithTags(i: OFulltext.FtsItemWithTags): ItemLight = {
@@ -13,7 +13,6 @@ import cats.implicits._
 import docspell.backend.BackendApp
 import docspell.backend.auth.AuthToken
 import docspell.backend.ops.OCustomFields.{RemoveValue, SetValue}
-import docspell.backend.ops.OFulltext
 import docspell.backend.ops.OItemSearch.{Batch, Query}
 import docspell.backend.ops.OSimpleSearch
 import docspell.backend.ops.OSimpleSearch.StringSearchResult
@@ -41,389 +40,376 @@ object ItemRoutes {
       user: AuthToken
   ): HttpRoutes[F] = {
     val logger = docspell.logging.getLogger[F]
+    val searchPart = ItemSearchPart[F](backend, cfg, user)
     val dsl = new Http4sDsl[F] {}
     import dsl._
 
+    searchPart.routes <+>
       HttpRoutes.of {
         case GET -> Root / "search" :? QP.Query(q) :? QP.Limit(limit) :? QP.Offset(
               offset
             ) :? QP.WithDetails(detailFlag) :? QP.SearchKind(searchMode) =>
           val batch = Batch(offset.getOrElse(0), limit.getOrElse(cfg.maxItemPageSize))
             .restrictLimitTo(cfg.maxItemPageSize)
           val limitCapped = limit.exists(_ > cfg.maxItemPageSize)
           val itemQuery = ItemQueryString(q)
           val settings = OSimpleSearch.Settings(
             batch,
             cfg.fullTextSearch.enabled,
             detailFlag.getOrElse(false),
             cfg.maxNoteLength,
             searchMode.getOrElse(SearchMode.Normal)
           )
           val fixQuery = Query.Fix(user.account, None, None)
           searchItems(backend, dsl)(settings, fixQuery, itemQuery, limitCapped)
 
         case GET -> Root / "searchStats" :? QP.Query(q) :? QP.SearchKind(searchMode) =>
           val itemQuery = ItemQueryString(q)
           val fixQuery = Query.Fix(user.account, None, None)
           val settings = OSimpleSearch.StatsSettings(
             useFTS = cfg.fullTextSearch.enabled,
             searchMode = searchMode.getOrElse(SearchMode.Normal)
           )
           searchItemStats(backend, dsl)(settings, fixQuery, itemQuery)
 
         case req @ POST -> Root / "search" =>
           for {
+            timed <- Duration.stopTime[F]
             userQuery <- req.as[ItemQuery]
             batch = Batch(
               userQuery.offset.getOrElse(0),
               userQuery.limit.getOrElse(cfg.maxItemPageSize)
             ).restrictLimitTo(
               cfg.maxItemPageSize
             )
             limitCapped = userQuery.limit.exists(_ > cfg.maxItemPageSize)
             itemQuery = ItemQueryString(userQuery.query)
             settings = OSimpleSearch.Settings(
               batch,
               cfg.fullTextSearch.enabled,
               userQuery.withDetails.getOrElse(false),
               cfg.maxNoteLength,
               searchMode = userQuery.searchMode.getOrElse(SearchMode.Normal)
             )
             fixQuery = Query.Fix(user.account, None, None)
             resp <- searchItems(backend, dsl)(settings, fixQuery, itemQuery, limitCapped)
+            dur <- timed
+            _ <- logger.debug(s"Search request: ${dur.formatExact}")
           } yield resp
 
         case req @ POST -> Root / "searchStats" =>
           for {
             userQuery <- req.as[ItemQuery]
             itemQuery = ItemQueryString(userQuery.query)
             fixQuery = Query.Fix(user.account, None, None)
             settings = OSimpleSearch.StatsSettings(
               useFTS = cfg.fullTextSearch.enabled,
               searchMode = userQuery.searchMode.getOrElse(SearchMode.Normal)
             )
             resp <- searchItemStats(backend, dsl)(settings, fixQuery, itemQuery)
           } yield resp
 
-        case req @ POST -> Root / "searchIndex" =>
-          for {
-            mask <- req.as[ItemQuery]
-            limitCapped = mask.limit.exists(_ > cfg.maxItemPageSize)
-            resp <- mask.query match {
-              case q if q.length > 1 =>
-                val ftsIn = OFulltext.FtsInput(q)
-                val batch = Batch(
-                  mask.offset.getOrElse(0),
-                  mask.limit.getOrElse(cfg.maxItemPageSize)
-                ).restrictLimitTo(cfg.maxItemPageSize)
-                for {
-                  items <- backend.fulltext
-                    .findIndexOnly(cfg.maxNoteLength)(ftsIn, user.account, batch)
-                  ok <- Ok(
-                    Conversions.mkItemListWithTagsFtsPlain(items, batch, limitCapped)
-                  )
-                } yield ok
-              case _ =>
-                BadRequest(BasicResult(false, "Query string too short"))
-            }
-          } yield resp
-
         case GET -> Root / Ident(id) =>
           for {
             item <- backend.itemSearch.findItem(id, user.account.collective)
             result = item.map(Conversions.mkItemDetail)
             resp <-
               result
                 .map(r => Ok(r))
                 .getOrElse(NotFound(BasicResult(false, "Not found.")))
           } yield resp
 
         case POST -> Root / Ident(id) / "confirm" =>
           for {
             res <- backend.item.setState(id, ItemState.Confirmed, user.account.collective)
             resp <- Ok(Conversions.basicResult(res, "Item data confirmed"))
           } yield resp
 
         case POST -> Root / Ident(id) / "unconfirm" =>
           for {
             res <- backend.item.setState(id, ItemState.Created, user.account.collective)
             resp <- Ok(Conversions.basicResult(res, "Item back to created."))
           } yield resp
 
         case POST -> Root / Ident(id) / "restore" =>
           for {
             res <- backend.item.restore(NonEmptyList.of(id), user.account.collective)
             resp <- Ok(Conversions.basicResult(res, "Item restored."))
           } yield resp
 
         case req @ PUT -> Root / Ident(id) / "tags" =>
           for {
             tags <- req.as[StringList].map(_.items)
             res <- backend.item.setTags(id, tags, user.account.collective)
             baseUrl = ClientRequestInfo.getBaseUrl(cfg, req)
             _ <- backend.notification.offerEvents(res.event(user.account, baseUrl.some))
             resp <- Ok(Conversions.basicResult(res.value, "Tags updated"))
           } yield resp
 
         case req @ POST -> Root / Ident(id) / "tags" =>
           for {
             data <- req.as[Tag]
             rtag <- Conversions.newTag(data, user.account.collective)
             res <- backend.item.addNewTag(user.account.collective, id, rtag)
             baseUrl = ClientRequestInfo.getBaseUrl(cfg, req)
             _ <- backend.notification.offerEvents(res.event(user.account, baseUrl.some))
             resp <- Ok(Conversions.basicResult(res.value, "Tag added."))
           } yield resp
 
         case req @ PUT -> Root / Ident(id) / "taglink" =>
           for {
             tags <- req.as[StringList]
             res <- backend.item.linkTags(id, tags.items, user.account.collective)
             baseUrl = ClientRequestInfo.getBaseUrl(cfg, req)
             _ <- backend.notification.offerEvents(res.event(user.account, baseUrl.some))
             resp <- Ok(Conversions.basicResult(res.value, "Tags linked"))
           } yield resp
 
         case req @ POST -> Root / Ident(id) / "tagtoggle" =>
           for {
             tags <- req.as[StringList]
             res <- backend.item.toggleTags(id, tags.items, user.account.collective)
             baseUrl = ClientRequestInfo.getBaseUrl(cfg, req)
             _ <- backend.notification.offerEvents(res.event(user.account, baseUrl.some))
             resp <- Ok(Conversions.basicResult(res.value, "Tags linked"))
           } yield resp
 
         case req @ POST -> Root / Ident(id) / "tagsremove" =>
           for {
             json <- req.as[StringList]
             res <- backend.item.removeTagsMultipleItems(
               NonEmptyList.of(id),
               json.items,
               user.account.collective
             )
             baseUrl = ClientRequestInfo.getBaseUrl(cfg, req)
             _ <- backend.notification.offerEvents(res.event(user.account, baseUrl.some))
             resp <- Ok(Conversions.basicResult(res.value, "Tags removed"))
           } yield resp
 
         case req @ PUT -> Root / Ident(id) / "direction" =>
           for {
             dir <- req.as[DirectionValue]
             res <- backend.item.setDirection(
               NonEmptyList.of(id),
               dir.direction,
               user.account.collective
             )
             resp <- Ok(Conversions.basicResult(res, "Direction updated"))
           } yield resp
 
         case req @ PUT -> Root / Ident(id) / "folder" =>
           for {
             idref <- req.as[OptionalId]
             res <- backend.item.setFolder(id, idref.id.map(_.id), user.account.collective)
             resp <- Ok(Conversions.basicResult(res, "Folder updated"))
           } yield resp
 
         case req @ PUT -> Root / Ident(id) / "corrOrg" =>
           for {
             idref <- req.as[OptionalId]
             res <- backend.item.setCorrOrg(
               NonEmptyList.of(id),
               idref.id,
               user.account.collective
             )
             resp <- Ok(Conversions.basicResult(res, "Correspondent organization updated"))
           } yield resp
 
         case req @ POST -> Root / Ident(id) / "corrOrg" =>
           for {
             data <- req.as[Organization]
             org <- Conversions.newOrg(data, user.account.collective)
             res <- backend.item.addCorrOrg(id, org)
             resp <- Ok(Conversions.basicResult(res, "Correspondent organization updated"))
           } yield resp
 
         case req @ PUT -> Root / Ident(id) / "corrPerson" =>
           for {
             idref <- req.as[OptionalId]
             res <- backend.item.setCorrPerson(
               NonEmptyList.of(id),
               idref.id,
               user.account.collective
             )
             resp <- Ok(Conversions.basicResult(res, "Correspondent person updated"))
           } yield resp
 
         case req @ POST -> Root / Ident(id) / "corrPerson" =>
           for {
             data <- req.as[Person]
             pers <- Conversions.newPerson(data, user.account.collective)
             res <- backend.item.addCorrPerson(id, pers)
             resp <- Ok(Conversions.basicResult(res, "Correspondent person updated"))
           } yield resp
 
         case req @ PUT -> Root / Ident(id) / "concPerson" =>
           for {
             idref <- req.as[OptionalId]
             res <- backend.item.setConcPerson(
               NonEmptyList.of(id),
               idref.id,
               user.account.collective
             )
             resp <- Ok(Conversions.basicResult(res, "Concerned person updated"))
           } yield resp
 
         case req @ POST -> Root / Ident(id) / "concPerson" =>
           for {
             data <- req.as[Person]
             pers <- Conversions.newPerson(data, user.account.collective)
             res <- backend.item.addConcPerson(id, pers)
             resp <- Ok(Conversions.basicResult(res, "Concerned person updated"))
           } yield resp
 
         case req @ PUT -> Root / Ident(id) / "concEquipment" =>
           for {
             idref <- req.as[OptionalId]
             res <- backend.item.setConcEquip(
               NonEmptyList.of(id),
               idref.id,
               user.account.collective
             )
             resp <- Ok(Conversions.basicResult(res, "Concerned equipment updated"))
           } yield resp
 
         case req @ POST -> Root / Ident(id) / "concEquipment" =>
           for {
             data <- req.as[Equipment]
             equip <- Conversions.newEquipment(data, user.account.collective)
             res <- backend.item.addConcEquip(id, equip)
             resp <- Ok(Conversions.basicResult(res, "Concerned equipment updated"))
           } yield resp
 
         case req @ PUT -> Root / Ident(id) / "notes" =>
           for {
             text <- req.as[OptionalText]
             res <- backend.item.setNotes(id, text.text.notEmpty, user.account.collective)
             resp <- Ok(Conversions.basicResult(res, "Notes updated"))
           } yield resp
 
         case req @ PUT -> Root / Ident(id) / "name" =>
           for {
             text <- req.as[OptionalText]
             res <- backend.item.setName(
               id,
               text.text.notEmpty.getOrElse(""),
               user.account.collective
             )
             resp <- Ok(Conversions.basicResult(res, "Name updated"))
           } yield resp
 
         case req @ PUT -> Root / Ident(id) / "duedate" =>
           for {
             date <- req.as[OptionalDate]
             _ <- logger.debug(s"Setting item due date to ${date.date}")
             res <- backend.item.setItemDueDate(
               NonEmptyList.of(id),
               date.date,
               user.account.collective
             )
             resp <- Ok(Conversions.basicResult(res, "Item due date updated"))
           } yield resp
 
         case req @ PUT -> Root / Ident(id) / "date" =>
           for {
             date <- req.as[OptionalDate]
             _ <- logger.debug(s"Setting item date to ${date.date}")
             res <- backend.item.setItemDate(
               NonEmptyList.of(id),
               date.date,
               user.account.collective
             )
             resp <- Ok(Conversions.basicResult(res, "Item date updated"))
           } yield resp
 
         case GET -> Root / Ident(id) / "proposals" =>
           for {
             ml <- backend.item.getProposals(id, user.account.collective)
             ip = Conversions.mkItemProposals(ml)
             resp <- Ok(ip)
           } yield resp
 
         case req @ POST -> Root / Ident(id) / "attachment" / "movebefore" =>
           for {
             data <- req.as[MoveAttachment]
             _ <- logger.debug(s"Move item (${id.id}) attachment $data")
             res <- backend.item.moveAttachmentBefore(id, data.source, data.target)
             resp <- Ok(Conversions.basicResult(res, "Attachment moved."))
           } yield resp
 
         case req @ GET -> Root / Ident(id) / "preview" :? QP.WithFallback(flag) =>
           def notFound =
             NotFound(BasicResult(false, "Not found"))
           for {
             preview <- backend.itemSearch.findItemPreview(id, user.account.collective)
             inm = req.headers.get[`If-None-Match`].flatMap(_.tags)
             matches = BinaryUtil.matchETag(preview.map(_.meta), inm)
             fallback = flag.getOrElse(false)
             resp <-
               preview
                 .map { data =>
                   if (matches) BinaryUtil.withResponseHeaders(dsl, NotModified())(data)
                   else BinaryUtil.makeByteResp(dsl)(data).map(Responses.noCache)
                 }
                 .getOrElse(
                   if (fallback) BinaryUtil.noPreview(req.some).getOrElseF(notFound)
                   else notFound
                 )
           } yield resp
 
         case HEAD -> Root / Ident(id) / "preview" =>
           for {
             preview <- backend.itemSearch.findItemPreview(id, user.account.collective)
             resp <-
               preview
                 .map(data => BinaryUtil.withResponseHeaders(dsl, Ok())(data))
                 .getOrElse(NotFound(BasicResult(false, "Not found")))
           } yield resp
 
         case req @ POST -> Root / Ident(id) / "reprocess" =>
           for {
             data <- req.as[IdList]
             _ <- logger.debug(s"Re-process item ${id.id}")
             res <- backend.item.reprocess(id, data.ids, user.account)
             resp <- Ok(Conversions.basicResult(res, "Re-process task submitted."))
           } yield resp
 
         case req @ PUT -> Root / Ident(id) / "customfield" =>
           for {
             data <- req.as[CustomFieldValue]
             res <- backend.customFields.setValue(
               id,
               SetValue(data.field, data.value, user.account.collective)
             )
             baseUrl = ClientRequestInfo.getBaseUrl(cfg, req)
             _ <- backend.notification.offerEvents(res.event(user.account, baseUrl.some))
             resp <- Ok(Conversions.basicResult(res.value))
           } yield resp
 
         case req @ DELETE -> Root / Ident(id) / "customfield" / Ident(fieldId) =>
           for {
             res <- backend.customFields.deleteValue(
               RemoveValue(fieldId, NonEmptyList.of(id), user.account.collective)
             )
             baseUrl = ClientRequestInfo.getBaseUrl(cfg, req)
             _ <- backend.notification.offerEvents(res.event(user.account, baseUrl.some))
             resp <- Ok(Conversions.basicResult(res.value, "Custom field value removed."))
           } yield resp
 
         case DELETE -> Root / Ident(id) =>
           for {
-            n <- backend.item.setDeletedState(NonEmptyList.of(id), user.account.collective)
-            res = BasicResult(n > 0, if (n > 0) "Item deleted" else "Item deletion failed.")
+            n <- backend.item.setDeletedState(
+              NonEmptyList.of(id),
+              user.account.collective
+            )
+            res = BasicResult(
+              n > 0,
+              if (n > 0) "Item deleted" else "Item deletion failed."
+            )
             resp <- Ok(res)
           } yield resp
       }
   }
 
   def searchItems[F[_]: Sync](
@@ -0,0 +1,214 @@
/*
 * Copyright 2020 Eike K. & Contributors
 *
 * SPDX-License-Identifier: AGPL-3.0-or-later
 */

package docspell.restserver.routes

import java.time.LocalDate

import cats.effect._
import cats.syntax.all._

import docspell.backend.BackendApp
import docspell.backend.auth.AuthToken
import docspell.backend.ops.search.QueryParseResult
import docspell.common.{Duration, SearchMode, Timestamp}
import docspell.query.FulltextExtract.Result
import docspell.restapi.model._
import docspell.restserver.Config
import docspell.restserver.conv.Conversions
import docspell.restserver.http4s.{QueryParam => QP}
import docspell.store.qb.Batch
import docspell.store.queries.ListItemWithTags

import org.http4s.circe.CirceEntityCodec._
import org.http4s.dsl.Http4sDsl
import org.http4s.{HttpRoutes, Response}

final class ItemSearchPart[F[_]: Async](
    backend: BackendApp[F],
    cfg: Config,
    authToken: AuthToken
) extends Http4sDsl[F] {

  private[this] val logger = docspell.logging.getLogger[F]

  def routes: HttpRoutes[F] =
    if (!cfg.featureSearch2) HttpRoutes.empty
    else
      HttpRoutes.of {
        case GET -> Root / "search" :? QP.Query(q) :? QP.Limit(limit) :? QP.Offset(
              offset
            ) :? QP.WithDetails(detailFlag) :? QP.SearchKind(searchMode) =>
          val userQuery =
            ItemQuery(offset, limit, detailFlag, searchMode, q.getOrElse(""))
          for {
            today <- Timestamp.current[F].map(_.toUtcDate)
            resp <- search(userQuery, today)
          } yield resp

        case req @ POST -> Root / "search" =>
          for {
            timed <- Duration.stopTime[F]
            userQuery <- req.as[ItemQuery]
            today <- Timestamp.current[F].map(_.toUtcDate)
            resp <- search(userQuery, today)
            dur <- timed
            _ <- logger.debug(s"Search request: ${dur.formatExact}")
          } yield resp

        case GET -> Root / "searchStats" :? QP.Query(q) :? QP.SearchKind(searchMode) =>
          val userQuery = ItemQuery(None, None, None, searchMode, q.getOrElse(""))
          for {
            today <- Timestamp.current[F].map(_.toUtcDate)
            resp <- searchStats(userQuery, today)
          } yield resp

        case req @ POST -> Root / "searchStats" =>
          for {
            timed <- Duration.stopTime[F]
            userQuery <- req.as[ItemQuery]
            today <- Timestamp.current[F].map(_.toUtcDate)
            resp <- searchStats(userQuery, today)
            dur <- timed
            _ <- logger.debug(s"Search stats request: ${dur.formatExact}")
          } yield resp
      }

  def searchStats(userQuery: ItemQuery, today: LocalDate): F[Response[F]] = {
    val mode = userQuery.searchMode.getOrElse(SearchMode.Normal)
    parsedQuery(userQuery, mode)
      .fold(
        identity,
        res =>
          for {
            summary <- backend.search.searchSummary(today)(res.q, res.ftq)
            resp <- Ok(Conversions.mkSearchStats(summary))
          } yield resp
      )
  }

  def search(userQuery: ItemQuery, today: LocalDate): F[Response[F]] = {
    val details = userQuery.withDetails.getOrElse(false)
    val batch =
      Batch(userQuery.offset.getOrElse(0), userQuery.limit.getOrElse(cfg.maxItemPageSize))
        .restrictLimitTo(cfg.maxItemPageSize)
    val limitCapped = userQuery.limit.exists(_ > cfg.maxItemPageSize)
    val mode = userQuery.searchMode.getOrElse(SearchMode.Normal)

    parsedQuery(userQuery, mode)
      .fold(
        identity,
        res =>
          for {
            items <- backend.search
              .searchSelect(details, cfg.maxNoteLength, today, batch)(
                res.q,
                res.ftq
              )

            // order is always by date unless q is empty and ftq is not
            // TODO this should be given explicitly by the result
            ftsOrder = res.q.cond.isEmpty && res.ftq.isDefined

            resp <- Ok(convert(items, batch, limitCapped, ftsOrder))
          } yield resp
      )
  }

  def parsedQuery(
      userQuery: ItemQuery,
      mode: SearchMode
  ): Either[F[Response[F]], QueryParseResult.Success] =
    backend.search.parseQueryString(authToken.account, mode, userQuery.query) match {
      case s: QueryParseResult.Success =>
        Right(s.withFtsEnabled(cfg.fullTextSearch.enabled))

      case QueryParseResult.ParseFailed(err) =>
        Left(BadRequest(BasicResult(false, s"Invalid query: $err")))

      case QueryParseResult.FulltextMismatch(Result.TooMany) =>
        Left(
          BadRequest(
            BasicResult(false, "Only one fulltext search expression is allowed.")
          )
        )
      case QueryParseResult.FulltextMismatch(Result.UnsupportedPosition) =>
        Left(
          BadRequest(
            BasicResult(
              false,
              "A fulltext search may only appear in the root and expression."
            )
          )
        )
    }

  def convert(
      items: Vector[ListItemWithTags],
      batch: Batch,
      capped: Boolean,
      ftsOrder: Boolean
  ): ItemLightList =
    if (ftsOrder)
      ItemLightList(
        List(ItemLightGroup("Results", items.map(convertItem).toList)),
        batch.limit,
        batch.offset,
        capped
      )
    else {
      val groups = items.groupBy(ti => ti.item.date.toUtcDate.toString.substring(0, 7))

      def mkGroup(g: (String, Vector[ListItemWithTags])): ItemLightGroup =
        ItemLightGroup(g._1, g._2.map(convertItem).toList)

      val gs =
        groups.map(mkGroup).toList.sortWith((g1, g2) => g1.name.compareTo(g2.name) >= 0)

      ItemLightList(gs, batch.limit, batch.offset, capped)
    }

  def convertItem(item: ListItemWithTags): ItemLight =
    ItemLight(
      id = item.item.id,
      name = item.item.name,
      state = item.item.state,
      date = item.item.date,
      dueDate = item.item.dueDate,
      source = item.item.source,
      direction = item.item.direction.name.some,
      corrOrg = item.item.corrOrg.map(Conversions.mkIdName),
      corrPerson = item.item.corrPerson.map(Conversions.mkIdName),
      concPerson = item.item.concPerson.map(Conversions.mkIdName),
      concEquipment = item.item.concEquip.map(Conversions.mkIdName),
      folder = item.item.folder.map(Conversions.mkIdName),
      attachments = item.attachments.map(Conversions.mkAttachmentLight),
      tags = item.tags.map(Conversions.mkTag),
      customfields = item.customfields.map(Conversions.mkItemFieldValue),
      relatedItems = item.relatedItems,
      notes = item.item.notes,
      highlighting = item.item.decodeContext match {
        case Some(Right(hlctx)) =>
          hlctx.map(c => HighlightEntry(c.name, c.context))
        case Some(Left(err)) =>
          logger.asUnsafe.error(
            s"Internal error: cannot decode highlight context '${item.item.context}': $err"
          )
          Nil
        case None =>
          Nil
      }
    )
}

object ItemSearchPart {
  def apply[F[_]: Async](
      backend: BackendApp[F],
      cfg: Config,
      token: AuthToken
  ): ItemSearchPart[F] =
    new ItemSearchPart[F](backend, cfg, token)
}
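
`parsedQuery` folds parse failures directly into the HTTP layer: the `Left` side already is the finished `BadRequest` response, so `search` and `searchStats` only spell out the success path. A minimal sketch of that pattern in isolation (the names below are illustrative, not part of this change):

// Sketch of the Either-as-response pattern used by parsedQuery above.
def parsePositive(s: String): Either[String, Int] =
  s.toIntOption.filter(_ > 0).toRight(s"not a positive number: $s")

def respond(s: String): String =
  parsePositive(s).fold(
    err => s"400 $err",  // the Left is already the final response
    n => s"200 ${n + n}" // the Right continues the happy path
  )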
modules/store/src/main/scala/docspell/store/Db.scala (new file, 53 lines)
@@ -0,0 +1,53 @@
/*
 * Copyright 2020 Eike K. & Contributors
 *
 * SPDX-License-Identifier: AGPL-3.0-or-later
 */

package docspell.store

import cats.data.NonEmptyList

import io.circe.{Decoder, Encoder}

sealed trait Db {
  def name: String
  def driverClass: String

  def fold[A](fpg: => A, fm: => A, fh2: => A): A
}

object Db {

  case object PostgreSQL extends Db {
    val name = "postgresql"
    val driverClass = "org.postgresql.Driver"
    def fold[A](fpg: => A, fm: => A, fh2: => A): A = fpg
  }

  case object MariaDB extends Db {
    val name = "mariadb"
    val driverClass = "org.mariadb.jdbc.Driver"
    def fold[A](fpg: => A, fm: => A, fh2: => A): A = fm
  }

  case object H2 extends Db {
    val name = "h2"
    val driverClass = "org.h2.Driver"
    def fold[A](fpg: => A, fm: => A, fh2: => A): A = fh2
  }

  val all: NonEmptyList[Db] = NonEmptyList.of(PostgreSQL, MariaDB, H2)

  def fromString(str: String): Either[String, Db] =
    all.find(_.name.equalsIgnoreCase(str)).toRight(s"Unsupported db name: $str")

  def unsafeFromString(str: String): Db =
    fromString(str).fold(sys.error, identity)

  implicit val jsonDecoder: Decoder[Db] =
    Decoder.decodeString.emap(fromString)

  implicit val jsonEncoder: Encoder[Db] =
    Encoder.encodeString.contramap(_.name)
}
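
`Db` replaces the earlier stringly-typed DBMS detection with an exhaustive `fold`. A short sketch of how callers dispatch on the database; the aggregate function names are the ones the fts code later in this diff uses per database:

// Sketch: exhaustive DBMS dispatch via fold instead of string matching.
def aggregateFn(db: Db): String =
  db.fold("string_agg", "group_concat", "listagg")

// Db.fromString is case-insensitive:
// Db.fromString("PostgreSQL") == Right(Db.PostgreSQL)
// Db.fromString("sqlite") == Left("Unsupported db name: sqlite")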
@@ -10,35 +10,21 @@ import docspell.common.LenientUri

 case class JdbcConfig(url: LenientUri, user: String, password: String) {

-  val dbmsName: Option[String] =
-    JdbcConfig.extractDbmsName(url)
+  val dbms: Db =
+    JdbcConfig.extractDbmsName(url).fold(sys.error, identity)

-  def driverClass =
-    dbmsName match {
-      case Some("mariadb") =>
-        "org.mariadb.jdbc.Driver"
-      case Some("postgresql") =>
-        "org.postgresql.Driver"
-      case Some("h2") =>
-        "org.h2.Driver"
-      case Some("sqlite") =>
-        "org.sqlite.JDBC"
-      case Some(n) =>
-        sys.error(s"Unknown DBMS: $n")
-      case None =>
-        sys.error("No JDBC url specified")
-    }
-
   override def toString: String =
     s"JdbcConfig(${url.asString}, $user, ***)"
 }

 object JdbcConfig {
-  def extractDbmsName(jdbcUrl: LenientUri): Option[String] =
+  private def extractDbmsName(jdbcUrl: LenientUri): Either[String, Db] =
     jdbcUrl.scheme.head match {
       case "jdbc" =>
         jdbcUrl.scheme.tail.headOption
+          .map(Db.fromString)
+          .getOrElse(Left(s"Invalid jdbc url: ${jdbcUrl.asString}"))
       case _ =>
-        None
+        Left(s"No scheme provided for url: ${jdbcUrl.asString}")
     }
 }
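
Deriving `dbms` eagerly changes the failure mode: an unsupported URL now fails when the config is constructed instead of when a driver class is first requested. A sketch, assuming `LenientUri.unsafe` accepts a URL of this shape:

// Sketch (behavior inferred from extractDbmsName above): the second
// scheme segment of a jdbc url selects the Db value.
val cfg = JdbcConfig(
  LenientUri.unsafe("jdbc:postgresql://localhost:5432/docspell"),
  user = "dbuser",
  password = "***"
)
// cfg.dbms == Db.PostgreSQL. A formerly tolerated scheme like
// "jdbc:sqlite:..." now fails eagerly via sys.error, since Db.fromString
// only knows postgresql, mariadb and h2.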
@@ -36,6 +36,8 @@ trait Store[F[_]] {
   def add(insert: ConnectionIO[Int], exists: ConnectionIO[Boolean]): F[AddResult]

   def transactor: Transactor[F]

+  def dbms: Db
+
 }

 object Store {
@@ -55,7 +57,7 @@ object Store {
       ds.setJdbcUrl(jdbc.url.asString)
       ds.setUsername(jdbc.user)
       ds.setPassword(jdbc.password)
-      ds.setDriverClassName(jdbc.driverClass)
+      ds.setDriverClassName(jdbc.dbms.driverClass)
     }
     xa = HikariTransactor(ds, connectEC)
     fr = FileRepository.apply(xa, ds, fileRepoConfig, true)
@@ -0,0 +1,30 @@
/*
 * Copyright 2020 Eike K. & Contributors
 *
 * SPDX-License-Identifier: AGPL-3.0-or-later
 */

package docspell.store.fts

import docspell.store.impl.DoobieMeta.jsonMeta

import doobie.Meta
import io.circe.generic.semiauto.{deriveDecoder, deriveEncoder}
import io.circe.{Decoder, Encoder}

/** Highlighting context from a fulltext search.
  *
  * @param name
  *   the document name, either attachment name or "item"
  * @param context
  *   lines with highlighting infos
  */
case class ContextEntry(name: String, context: List[String])

object ContextEntry {
  implicit val jsonDecoder: Decoder[ContextEntry] = deriveDecoder
  implicit val jsonEncoder: Encoder[ContextEntry] = deriveEncoder

  implicit val meta: Meta[ContextEntry] =
    jsonMeta[ContextEntry]
}
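
The derived codecs fix the JSON shape stored per match in the temp table. Roughly (field names come from the case class; the values are made up):

// Sketch: JSON produced by the derived encoder.
import io.circe.syntax._

ContextEntry("invoice.pdf", List("page 1: **acme** gmbh")).asJson.noSpaces
// {"name":"invoice.pdf","context":["page 1: **acme** gmbh"]}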
@@ -0,0 +1,88 @@
/*
 * Copyright 2020 Eike K. & Contributors
 *
 * SPDX-License-Identifier: AGPL-3.0-or-later
 */

package docspell.store.fts

import cats.Foldable
import cats.data.NonEmptyList
import cats.effect.Sync
import cats.syntax.all._
import fs2.Pipe

import docspell.common._
import docspell.ftsclient.FtsResult
import docspell.store.Db
import docspell.store.qb.DSL._
import docspell.store.qb._

import doobie._
import doobie.implicits._

/** Temporary table used to store item ids fetched from fulltext search */
case class RFtsResult(id: Ident, score: Option[Double], context: Option[ContextEntry])

object RFtsResult {
  def fromResult(result: FtsResult)(m: FtsResult.ItemMatch): RFtsResult = {
    val context = m.data match {
      case FtsResult.AttachmentData(_, attachName) =>
        result.highlight
          .get(m.id)
          .filter(_.nonEmpty)
          .map(str => ContextEntry(attachName, str))

      case FtsResult.ItemData =>
        result.highlight
          .get(m.id)
          .filter(_.nonEmpty)
          .map(str => ContextEntry("item", str))
    }
    RFtsResult(m.itemId, m.score.some, context)
  }

  def prepareTable(db: Db, name: String): Pipe[ConnectionIO, FtsResult, Table] =
    TempFtsOps.prepareTable(db, name)

  case class Table(tableName: String, alias: Option[String], dbms: Db) extends TableDef {
    val id: Column[Ident] = Column("id", this)
    val score: Column[Double] = Column("score", this)
    val context: Column[ContextEntry] = Column("context", this)

    val all: NonEmptyList[Column[_]] = NonEmptyList.of(id, score, context)

    def as(newAlias: String): Table = copy(alias = Some(newAlias))

    def distinctCte(name: String) =
      dbms.fold(
        TempFtsOps.distinctCtePg(this, name),
        TempFtsOps.distinctCteMaria(this, name),
        TempFtsOps.distinctCteH2(this, name)
      )

    def distinctCteSimple(name: String) =
      CteBind(copy(tableName = name) -> Select(select(id), from(this)).distinct)

    def insertAll[F[_]: Foldable](rows: F[RFtsResult]): ConnectionIO[Int] =
      TempFtsOps.insertBatch(this, rows)

    def dropTable: ConnectionIO[Int] =
      TempFtsOps.dropTable(Fragment.const0(tableName)).update.run

    def createIndex: ConnectionIO[Unit] = {
      val analyze = dbms.fold(
        TempFtsOps.analyzeTablePg(this),
        cio.unit,
        cio.unit
      )

      TempFtsOps.createIndex(this) *> analyze
    }

    def insert: Pipe[ConnectionIO, FtsResult, Int] =
      in => in.evalMap(res => insertAll(res.results.map(RFtsResult.fromResult(res))))
  }

  private val cio: Sync[ConnectionIO] = Sync[ConnectionIO]
}
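
Put together, a stream of fulltext results is piped into a per-transaction temp table whose `Table` handle is then joined against items. A sketch of that flow (the table name mirrors the one used in the tests at the end of this diff; the surrounding wiring is assumed):

// Sketch: load fulltext matches into the temp table inside the same
// transaction that runs the item query afterwards.
import docspell.ftsclient.FtsResult
import docspell.store.Db
import docspell.store.fts.RFtsResult
import doobie.ConnectionIO
import fs2.Stream

def loadFts(
    results: Stream[ConnectionIO, FtsResult],
    db: Db
): ConnectionIO[Option[RFtsResult.Table]] =
  results
    .through(RFtsResult.prepareTable(db, "fts_result"))
    .compile
    .last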
modules/store/src/main/scala/docspell/store/fts/TempFtsOps.scala (new file, 190 lines)
@@ -0,0 +1,190 @@
/*
 * Copyright 2020 Eike K. & Contributors
 *
 * SPDX-License-Identifier: AGPL-3.0-or-later
 */

package docspell.store.fts

import cats.syntax.all._
import cats.{Foldable, Monad}
import fs2.{Pipe, Stream}

import docspell.common.Duration
import docspell.ftsclient.FtsResult
import docspell.store.Db
import docspell.store.fts.RFtsResult.Table
import docspell.store.qb.DSL._
import docspell.store.qb._

import doobie._
import doobie.implicits._

private[fts] object TempFtsOps {
  private[this] val logger = docspell.logging.getLogger[ConnectionIO]

  def createTable(db: Db, name: String): ConnectionIO[Table] = {
    val stmt = db.fold(
      createTablePostgreSQL(Fragment.const(name)),
      createTableMariaDB(Fragment.const0(name)),
      createTableH2(Fragment.const0(name))
    )
    stmt.as(Table(name, None, db))
  }

  def prepareTable(db: Db, name: String): Pipe[ConnectionIO, FtsResult, Table] =
    in =>
      for {
        timed <- Stream.eval(Duration.stopTime[ConnectionIO])
        tt <- Stream.eval(createTable(db, name))
        n <- in.through(tt.insert).foldMonoid
        _ <- if (n > 500) Stream.eval(tt.createIndex) else Stream(())
        duration <- Stream.eval(timed)
        _ <- Stream.eval(
          logger.debug(
            s"Creating temporary fts table ($n elements) took: ${duration.formatExact}"
          )
        )
      } yield tt

  def dropTable(name: Fragment): Fragment =
    sql"""DROP TABLE IF EXISTS $name"""

  private def createTableH2(name: Fragment): ConnectionIO[Int] =
    sql"""${dropTable(name)}; CREATE LOCAL TEMPORARY TABLE $name (
         |  id varchar not null,
         |  score double precision,
         |  context text
         |);""".stripMargin.update.run

  private def createTableMariaDB(name: Fragment): ConnectionIO[Int] =
    dropTable(name).update.run *>
      sql"""CREATE TEMPORARY TABLE $name (
           |  id varchar(254) not null,
           |  score double,
           |  context mediumtext
           |)""".stripMargin.update.run

  private def createTablePostgreSQL(name: Fragment): ConnectionIO[Int] =
    sql"""CREATE TEMPORARY TABLE IF NOT EXISTS $name (
         |  id varchar not null,
         |  score double precision,
         |  context text
         |) ON COMMIT DROP;""".stripMargin.update.run

  def createIndex(table: Table): ConnectionIO[Unit] = {
    val tableName = Fragment.const0(table.tableName)

    val idIdxName = Fragment.const0(s"${table.tableName}_id_idx")
    val id = Fragment.const0(table.id.name)
    val scoreIdxName = Fragment.const0(s"${table.tableName}_score_idx")
    val score = Fragment.const0(table.score.name)

    sql"CREATE INDEX IF NOT EXISTS $idIdxName ON $tableName($id)".update.run.void *>
      sql"CREATE INDEX IF NOT EXISTS $scoreIdxName ON $tableName($score)".update.run.void
  }

  def analyzeTablePg(table: Table): ConnectionIO[Unit] = {
    val tableName = Fragment.const0(table.tableName)
    sql"ANALYZE $tableName".update.run.void
  }

  // // slowest (9 runs, 6000 rows each, ~170ms)
  // def insertBatch2[F[_]: Foldable](table: Table, rows: F[RFtsResult]) = {
  //   val sql =
  //     s"""INSERT INTO ${table.tableName}
  //        |  (${table.id.name}, ${table.score.name}, ${table.context.name})
  //        |  VALUES (?, ?, ?)""".stripMargin
  //
  //   Update[RFtsResult](sql).updateMany(rows)
  // }

  // // better (~115ms)
  // def insertBatch3[F[_]: Foldable](
  //     table: Table,
  //     rows: F[RFtsResult]
  // ): ConnectionIO[Int] = {
  //   val values = rows
  //     .foldl(List.empty[Fragment]) { (res, row) =>
  //       sql"(${row.id},${row.score},${row.context})" :: res
  //     }
  //
  //   DML.insertMulti(table, table.all, values)
  // }

  // ~96ms
  def insertBatch[F[_]: Foldable](
      table: Table,
      rows: F[RFtsResult]
  ): ConnectionIO[Int] = {
    val values = rows
      .foldl(List.empty[String]) { (res, _) =>
        "(?,?,?)" :: res
      }
      .mkString(",")
    if (values.isEmpty) Monad[ConnectionIO].pure(0)
    else {
      val sql =
        s"""INSERT INTO ${table.tableName}
           |  (${table.id.name}, ${table.score.name}, ${table.context.name})
           |  VALUES $values""".stripMargin

      val encoder = io.circe.Encoder[ContextEntry]
      doobie.free.FC.raw { conn =>
        val pst = conn.prepareStatement(sql)
        rows.foldl(0) { (index, row) =>
          pst.setString(index + 1, row.id.id)
          row.score
            .fold(pst.setNull(index + 2, java.sql.Types.DOUBLE))(d =>
              pst.setDouble(index + 2, d)
            )
          row.context
            .fold(pst.setNull(index + 3, java.sql.Types.VARCHAR))(c =>
              pst.setString(index + 3, encoder(c).noSpaces)
            )
          index + 3
        }
        pst.executeUpdate()
      }
    }
  }

  def distinctCtePg(table: Table, name: String): CteBind =
    CteBind(
      table.copy(tableName = name) ->
        Select(
          select(
            table.id.s,
            max(table.score).as(table.score.name),
            rawFunction("string_agg", table.context.s, lit("','")).as(table.context.name)
          ),
          from(table)
        ).groupBy(table.id)
    )

  def distinctCteMaria(table: Table, name: String): CteBind =
    CteBind(
      table.copy(tableName = name) ->
        Select(
          select(
            table.id.s,
            max(table.score).as(table.score.name),
            rawFunction("group_concat", table.context.s).as(table.context.name)
          ),
          from(table)
        ).groupBy(table.id)
    )

  def distinctCteH2(table: Table, name: String): CteBind =
    CteBind(
      table.copy(tableName = name) ->
        Select(
          select(
            table.id.s,
            max(table.score).as(table.score.name),
            rawFunction("listagg", table.context.s, lit("','")).as(table.context.name)
          ),
          from(table)
        ).groupBy(table.id)
    )
}
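
The `distinctCte` variants collapse the temp table to one row per item so the later join stays 1:1; the comma between aggregated entries is what `ListItem.decodeContext` later relies on. A sketch of building the PostgreSQL variant (the rendered SQL in the comment is an approximation; the exact output of the query builder is an assumption):

// Sketch: materializing the distinct CTE for a temp table. On PostgreSQL
// this should render roughly to:
//   WITH cte_fts AS (
//     SELECT id, max(score) AS score, string_agg(context, ',') AS context
//     FROM fts_result GROUP BY id
//   )
val tmp = RFtsResult.Table("fts_result", None, Db.PostgreSQL)
val cte = tmp.distinctCte("cte_fts")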
@@ -29,6 +29,8 @@ final class StoreImpl[F[_]: Async](
 ) extends Store[F] {
   private[this] val xa = transactor

+  val dbms = jdbc.dbms
+
   def createFileRepository(
       cfg: FileRepositoryConfig,
       withAttributeStore: Boolean
@@ -26,15 +26,7 @@ class FlywayMigrate[F[_]: Sync](
   private[this] val logger = docspell.logging.getLogger[F]

   private def createLocations(folder: String) =
-    jdbc.dbmsName match {
-      case Some(dbtype) =>
-        List(s"classpath:db/$folder/$dbtype", s"classpath:db/$folder/common")
-      case None =>
-        logger.warn(
-          s"Cannot read database name from jdbc url: ${jdbc.url}. Go with H2"
-        )
-        List(s"classpath:db/$folder/h2", s"classpath:db/$folder/common")
-    }
+    List(s"classpath:db/$folder/${jdbc.dbms.name}", s"classpath:db/$folder/common")

   def createFlyway(kind: MigrationKind): F[Flyway] =
     for {
@@ -43,6 +43,8 @@ object DBFunction {

   case class Concat(exprs: NonEmptyList[SelectExpr]) extends DBFunction

+  case class Raw(name: String, exprs: NonEmptyList[SelectExpr]) extends DBFunction
+
   sealed trait Operator
   object Operator {
     case object Plus extends Operator
@@ -41,6 +41,17 @@ object DML extends DoobieMeta {
   ): ConnectionIO[Int] =
     insertFragment(table, cols, values).update.run

+  def insertMulti(
+      table: TableDef,
+      cols: Nel[Column[_]],
+      values: Seq[Fragment]
+  ): ConnectionIO[Int] =
+    (fr"INSERT INTO ${FromExprBuilder.buildTable(table)} (" ++
+      cols
+        .map(SelectExprBuilder.columnNoPrefix)
+        .reduceLeft(_ ++ comma ++ _) ++
+      fr") VALUES ${values.reduce(_ ++ comma ++ _)}").update.run
+
   def insertFragment(
       table: TableDef,
       cols: Nel[Column[_]],
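
The new `insertMulti` renders one multi-row VALUES statement; the commented `insertBatch3` variant above builds on it. A usage sketch (`table` is assumed to be an `RFtsResult.Table`; the value fragments are made up):

// Sketch: single-statement multi-row insert.
val rows: Seq[Fragment] = Seq(
  sql"('id-1', 1.5, null)",
  sql"('id-2', 0.7, null)"
)
DML.insertMulti(table, table.all, rows)
// renders: INSERT INTO <table> (id, score, context) VALUES (...), (...)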
@@ -122,6 +122,9 @@ trait DSL extends DoobieMeta {
   def concat(expr: SelectExpr, exprs: SelectExpr*): DBFunction =
     DBFunction.Concat(Nel.of(expr, exprs: _*))

+  def rawFunction(name: String, expr: SelectExpr, more: SelectExpr*): DBFunction =
+    DBFunction.Raw(name, Nel.of(expr, more: _*))
+
   def const[A](value: A)(implicit P: Put[A]): SelectExpr.SelectConstant[A] =
     SelectExpr.SelectConstant(value, None)
@@ -61,6 +61,11 @@ object DBFunctionBuilder extends CommonBuilder {

     case DBFunction.Sum(expr) =>
       sql"SUM(" ++ SelectExprBuilder.build(expr) ++ fr")"
+
+    case DBFunction.Raw(name, exprs) =>
+      val n = Fragment.const0(name)
+      val inner = exprs.map(SelectExprBuilder.build).toList.reduce(_ ++ comma ++ _)
+      sql"$n($inner)"
   }

   def buildOperator(op: DBFunction.Operator): Fragment =
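
`rawFunction` plus the `Raw` builder case is the escape hatch for vendor-specific SQL functions the DSL has no dedicated constructor for. The call below is taken from `distinctCtePg` earlier in this diff:

// Renders to: string_agg(context, ',') — the PostgreSQL aggregation used
// for the distinct fts CTE.
rawFunction("string_agg", table.context.s, lit("','"))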
@@ -0,0 +1,62 @@
/*
 * Copyright 2020 Eike K. & Contributors
 *
 * SPDX-License-Identifier: AGPL-3.0-or-later
 */

package docspell.store.queries

import docspell.store.fts.RFtsResult
import docspell.store.qb.DSL._
import docspell.store.qb._
import docspell.store.records.RItem

trait FtsSupport {

  implicit final class SelectOps(select: Select) {
    def joinFtsIdOnly(
        itemTable: RItem.Table,
        ftsTable: Option[RFtsResult.Table]
    ): Select =
      ftsTable match {
        case Some(ftst) =>
          val tt = cteTable(ftst)
          select
            .appendCte(ftst.distinctCteSimple(tt.tableName))
            .changeFrom(_.prepend(from(itemTable).innerJoin(tt, itemTable.id === tt.id)))
        case None =>
          select
      }

    def joinFtsDetails(
        itemTable: RItem.Table,
        ftsTable: Option[RFtsResult.Table]
    ): Select =
      ftsTable match {
        case Some(ftst) =>
          val tt = cteTable(ftst)
          select
            .appendCte(ftst.distinctCte(tt.tableName))
            .changeFrom(_.prepend(from(itemTable).innerJoin(tt, itemTable.id === tt.id)))
        case None =>
          select
      }

    def ftsCondition(
        itemTable: RItem.Table,
        ftsTable: Option[RFtsResult.Table]
    ): Select =
      ftsTable match {
        case Some(ftst) =>
          val ftsIds = Select(ftst.id.s, from(ftst)).distinct
          select.changeWhere(c => c && itemTable.id.in(ftsIds))
        case None =>
          select
      }
  }

  def cteTable(ftsTable: RFtsResult.Table) =
    ftsTable.copy(tableName = "cte_fts")
}

object FtsSupport extends FtsSupport
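
All three combinators are no-ops for `ftsTable = None`, so callers thread the `Option` through unchanged instead of branching; `QItem` below relies on that. A sketch (`baseSelect`, `itemTable` and `ftsTable` are assumed bindings, not part of this change):

// Sketch: opting a summary query into the fulltext restriction.
val restricted = baseSelect.joinFtsIdOnly(itemTable, ftsTable)
// ftsTable = None       -> baseSelect unchanged
// ftsTable = Some(ftst) -> CTE with distinct ids + inner join on item id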
@@ -7,6 +7,7 @@
 package docspell.store.queries

 import docspell.common._
+import docspell.store.fts.ContextEntry

 case class ListItem(
     id: Ident,
@@ -22,5 +23,24 @@ case class ListItem(
     concPerson: Option[IdRef],
     concEquip: Option[IdRef],
     folder: Option[IdRef],
-    notes: Option[String]
-)
+    notes: Option[String],
+    context: Option[String]
+) {
+
+  def decodeContext: Option[Either[String, List[ContextEntry]]] =
+    context.map(_.trim).filter(_.nonEmpty).map { str =>
+      // This is a bit…. The common denominator for the dbms used is string aggregation
+      // when combining multiple matches. So the `ContextEntry` objects are concatenated and
+      // separated by comma. TemplateFtsTable ensures that the single entries are all json
+      // objects.
+      val jsonStr = s"[ $str ]"
+      io.circe.parser
+        .decode[List[Option[ContextEntry]]](jsonStr)
+        .left
+        .map(_.getMessage)
+        .map(_.flatten)
+    }
+
+  def toWithTags: ListItemWithTags =
+    ListItemWithTags(this, Nil, Nil, Nil, Nil)
+}
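
The bracket trick in `decodeContext` turns the DBMS-aggregated string back into JSON: two objects joined by a comma become a valid array once wrapped. Illustrative values:

// Sketch: what decodeContext sees after string aggregation (made up).
val aggregated =
  """{"name":"a.pdf","context":["x"]},{"name":"item","context":["y"]}"""
io.circe.parser.decode[List[Option[ContextEntry]]](s"[ $aggregated ]")
// Right(List(Some(ContextEntry("a.pdf", List("x"))),
//            Some(ContextEntry("item", List("y")))))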
@@ -18,15 +18,17 @@ import docspell.common.{FileKey, IdRef, _}
 import docspell.query.ItemQuery.Expr.ValidItemStates
 import docspell.query.{ItemQuery, ItemQueryDsl}
 import docspell.store.Store
+import docspell.store.fts.RFtsResult
 import docspell.store.qb.DSL._
 import docspell.store.qb._
 import docspell.store.qb.generator.{ItemQueryGenerator, Tables}
+import docspell.store.queries.Query.OrderSelect
 import docspell.store.records._

 import doobie.implicits._
 import doobie.{Query => _, _}

-object QItem {
+object QItem extends FtsSupport {
   private[this] val logger = docspell.logging.getLogger[ConnectionIO]

   private val equip = REquipment.as("e")
@@ -44,6 +46,35 @@ object QItem {
   private val ti = RTagItem.as("ti")
   private val meta = RFileMeta.as("fmeta")

+  private def orderSelect(ftsOpt: Option[RFtsResult.Table]): OrderSelect =
+    new OrderSelect {
+      val item = i
+      val fts = ftsOpt
+    }
+
+  private val emptyString: SelectExpr = const("")
+
+  def queryItems(
+      q: Query,
+      today: LocalDate,
+      maxNoteLen: Int,
+      batch: Batch,
+      ftsTable: Option[RFtsResult.Table]
+  ) = {
+    val cteFts = ftsTable.map(cteTable)
+    val sql =
+      findItemsBase(q.fix, today, maxNoteLen, cteFts)
+        .changeWhere(c => c && queryCondition(today, q.fix.account.collective, q.cond))
+        .joinFtsDetails(i, ftsTable)
+        .limit(batch)
+        .build
+
+    logger.stream.debug(s"List $batch items: $sql").drain ++
+      sql.query[ListItem].stream
+  }
+
+  // ----
+
   def countAttachmentsAndItems(items: Nel[Ident]): ConnectionIO[Int] =
     Select(count(a.id).s, from(a), a.itemId.in(items)).build
       .query[Int]
@@ -115,7 +146,12 @@ object QItem {
       ItemQuery.Expr.and(ValidItemStates, ItemQueryDsl.Q.itemIdsIn(nel.map(_.id)))
     val account = AccountId(collective, Ident.unsafe(""))

-    findItemsBase(Query.Fix(account, Some(expr), None), LocalDate.EPOCH, 0).build
+    findItemsBase(
+      Query.Fix(account, Some(expr), None),
+      LocalDate.EPOCH,
+      0,
+      None
+    ).build
       .query[ListItem]
       .to[Vector]
   }
@@ -130,7 +166,12 @@ object QItem {
       cv.itemId === itemId
     ).build.query[ItemFieldValue].to[Vector]

-  private def findItemsBase(q: Query.Fix, today: LocalDate, noteMaxLen: Int): Select = {
+  private def findItemsBase(
+      q: Query.Fix,
+      today: LocalDate,
+      noteMaxLen: Int,
+      ftsTable: Option[RFtsResult.Table]
+  ): Select.Ordered = {
     val coll = q.account.collective

     Select(
@@ -154,8 +195,9 @@ object QItem {
         f.id.s,
         f.name.s,
         substring(i.notes.s, 1, noteMaxLen).s,
-        q.orderAsc
-          .map(of => coalesce(of(i).s, i.created.s).s)
+        ftsTable.map(_.context.s).getOrElse(emptyString),
+        q.order
+          .map(f => f(orderSelect(ftsTable)).expr)
           .getOrElse(i.created.s)
       ),
       from(i)
@@ -172,8 +214,8 @@ object QItem {
           )
       )
     ).orderBy(
-      q.orderAsc
-        .map(of => OrderBy.asc(coalesce(of(i).s, i.created.s).s))
+      q.order
+        .map(of => of(orderSelect(ftsTable)))
        .getOrElse(OrderBy.desc(coalesce(i.itemDate.s, i.created.s).s))
    )
  }
@@ -184,7 +226,7 @@ object QItem {
       today: LocalDate,
       maxFiles: Int
   ): Select =
-    findItemsBase(q.fix, today, 0)
+    findItemsBase(q.fix, today, 0, None)
       .changeFrom(_.innerJoin(a, a.itemId === i.id).innerJoin(as, a.id === as.id))
       .changeFrom(from =>
         ftype match {
@@ -277,26 +319,22 @@ object QItem {
       today: LocalDate,
       maxNoteLen: Int,
       batch: Batch
-  ): Stream[ConnectionIO, ListItem] = {
-    val sql = findItemsBase(q.fix, today, maxNoteLen)
-      .changeWhere(c => c && queryCondition(today, q.fix.account.collective, q.cond))
-      .limit(batch)
-      .build
-    logger.stream.trace(s"List $batch items: $sql").drain ++
-      sql.query[ListItem].stream
-  }
+  ): Stream[ConnectionIO, ListItem] =
+    queryItems(q, today, maxNoteLen, batch, None)

-  def searchStats(today: LocalDate)(q: Query): ConnectionIO[SearchSummary] =
+  def searchStats(today: LocalDate, ftsTable: Option[RFtsResult.Table])(
+      q: Query
+  ): ConnectionIO[SearchSummary] =
     for {
-      count <- searchCountSummary(today)(q)
-      tags <- searchTagSummary(today)(q)
-      cats <- searchTagCategorySummary(today)(q)
-      fields <- searchFieldSummary(today)(q)
-      folders <- searchFolderSummary(today)(q)
-      orgs <- searchCorrOrgSummary(today)(q)
-      corrPers <- searchCorrPersonSummary(today)(q)
-      concPers <- searchConcPersonSummary(today)(q)
-      concEquip <- searchConcEquipSummary(today)(q)
+      count <- searchCountSummary(today, ftsTable)(q)
+      tags <- searchTagSummary(today, ftsTable)(q)
+      cats <- searchTagCategorySummary(today, ftsTable)(q)
+      fields <- searchFieldSummary(today, ftsTable)(q)
+      folders <- searchFolderSummary(today, ftsTable)(q)
+      orgs <- searchCorrOrgSummary(today, ftsTable)(q)
+      corrPers <- searchCorrPersonSummary(today, ftsTable)(q)
+      concPers <- searchConcPersonSummary(today, ftsTable)(q)
+      concEquip <- searchConcEquipSummary(today, ftsTable)(q)
     } yield SearchSummary(
       count,
       tags,
@@ -310,7 +348,8 @@ object QItem {
     )

   def searchTagCategorySummary(
-      today: LocalDate
+      today: LocalDate,
+      ftsTable: Option[RFtsResult.Table]
   )(q: Query): ConnectionIO[List[CategoryCount]] = {
     val tagFrom =
       from(ti)
@@ -318,7 +357,8 @@ object QItem {
         .innerJoin(i, i.id === ti.itemId)

     val catCloud =
-      findItemsBase(q.fix, today, 0).unwrap
+      findItemsBase(q.fix, today, 0, None).unwrap
+        .joinFtsIdOnly(i, ftsTable)
         .withSelect(select(tag.category).append(countDistinct(i.id).as("num")))
         .changeFrom(_.prepend(tagFrom))
         .changeWhere(c => c && queryCondition(today, q.fix.account.collective, q.cond))
@@ -334,14 +374,17 @@ object QItem {
     } yield existing ++ other.map(n => CategoryCount(n.some, 0))
   }

-  def searchTagSummary(today: LocalDate)(q: Query): ConnectionIO[List[TagCount]] = {
+  def searchTagSummary(today: LocalDate, ftsTable: Option[RFtsResult.Table])(
+      q: Query
+  ): ConnectionIO[List[TagCount]] = {
     val tagFrom =
       from(ti)
         .innerJoin(tag, tag.tid === ti.tagId)
         .innerJoin(i, i.id === ti.itemId)

     val tagCloud =
-      findItemsBase(q.fix, today, 0).unwrap
+      findItemsBase(q.fix, today, 0, None).unwrap
+        .joinFtsIdOnly(i, ftsTable)
         .withSelect(select(tag.all).append(countDistinct(i.id).as("num")))
         .changeFrom(_.prepend(tagFrom))
         .changeWhere(c => c && queryCondition(today, q.fix.account.collective, q.cond))
@@ -358,39 +401,46 @@ object QItem {
     } yield existing ++ other.map(TagCount(_, 0))
   }

-  def searchCountSummary(today: LocalDate)(q: Query): ConnectionIO[Int] =
-    findItemsBase(q.fix, today, 0).unwrap
+  def searchCountSummary(today: LocalDate, ftsTable: Option[RFtsResult.Table])(
+      q: Query
+  ): ConnectionIO[Int] =
+    findItemsBase(q.fix, today, 0, None).unwrap
+      .joinFtsIdOnly(i, ftsTable)
       .withSelect(Nel.of(count(i.id).as("num")))
       .changeWhere(c => c && queryCondition(today, q.fix.account.collective, q.cond))
       .build
       .query[Int]
       .unique

-  def searchCorrOrgSummary(today: LocalDate)(q: Query): ConnectionIO[List[IdRefCount]] =
-    searchIdRefSummary(org.oid, org.name, i.corrOrg, today)(q)
-
-  def searchCorrPersonSummary(today: LocalDate)(
+  def searchCorrOrgSummary(today: LocalDate, ftsTable: Option[RFtsResult.Table])(
       q: Query
   ): ConnectionIO[List[IdRefCount]] =
-    searchIdRefSummary(pers0.pid, pers0.name, i.corrPerson, today)(q)
+    searchIdRefSummary(org.oid, org.name, i.corrOrg, today, ftsTable)(q)

-  def searchConcPersonSummary(today: LocalDate)(
+  def searchCorrPersonSummary(today: LocalDate, ftsTable: Option[RFtsResult.Table])(
       q: Query
   ): ConnectionIO[List[IdRefCount]] =
-    searchIdRefSummary(pers1.pid, pers1.name, i.concPerson, today)(q)
+    searchIdRefSummary(pers0.pid, pers0.name, i.corrPerson, today, ftsTable)(q)

-  def searchConcEquipSummary(today: LocalDate)(
+  def searchConcPersonSummary(today: LocalDate, ftsTable: Option[RFtsResult.Table])(
       q: Query
   ): ConnectionIO[List[IdRefCount]] =
-    searchIdRefSummary(equip.eid, equip.name, i.concEquipment, today)(q)
+    searchIdRefSummary(pers1.pid, pers1.name, i.concPerson, today, ftsTable)(q)
+
+  def searchConcEquipSummary(today: LocalDate, ftsTable: Option[RFtsResult.Table])(
+      q: Query
+  ): ConnectionIO[List[IdRefCount]] =
+    searchIdRefSummary(equip.eid, equip.name, i.concEquipment, today, ftsTable)(q)

   private def searchIdRefSummary(
       idCol: Column[Ident],
       nameCol: Column[String],
       fkCol: Column[Ident],
-      today: LocalDate
+      today: LocalDate,
+      ftsTable: Option[RFtsResult.Table]
   )(q: Query): ConnectionIO[List[IdRefCount]] =
-    findItemsBase(q.fix, today, 0).unwrap
+    findItemsBase(q.fix, today, 0, None).unwrap
+      .joinFtsIdOnly(i, ftsTable)
       .withSelect(select(idCol, nameCol).append(count(idCol).as("num")))
       .changeWhere(c =>
         c && fkCol.isNotNull && queryCondition(today, q.fix.account.collective, q.cond)
@@ -400,9 +450,12 @@ object QItem {
       .query[IdRefCount]
       .to[List]

-  def searchFolderSummary(today: LocalDate)(q: Query): ConnectionIO[List[FolderCount]] = {
+  def searchFolderSummary(today: LocalDate, ftsTable: Option[RFtsResult.Table])(
+      q: Query
+  ): ConnectionIO[List[FolderCount]] = {
     val fu = RUser.as("fu")
-    findItemsBase(q.fix, today, 0).unwrap
+    findItemsBase(q.fix, today, 0, None).unwrap
+      .joinFtsIdOnly(i, ftsTable)
       .withSelect(select(f.id, f.name, f.owner, fu.login).append(count(i.id).as("num")))
       .changeFrom(_.innerJoin(fu, fu.uid === f.owner))
       .changeWhere(c => c && queryCondition(today, q.fix.account.collective, q.cond))
@@ -412,16 +465,19 @@ object QItem {
       .to[List]
   }

-  def searchFieldSummary(today: LocalDate)(q: Query): ConnectionIO[List[FieldStats]] = {
+  def searchFieldSummary(today: LocalDate, ftsTable: Option[RFtsResult.Table])(
+      q: Query
+  ): ConnectionIO[List[FieldStats]] = {
     val fieldJoin =
       from(cv)
         .innerJoin(cf, cf.id === cv.field)
         .innerJoin(i, i.id === cv.itemId)

     val base =
-      findItemsBase(q.fix, today, 0).unwrap
+      findItemsBase(q.fix, today, 0, None).unwrap
         .changeFrom(_.prepend(fieldJoin))
         .changeWhere(c => c && queryCondition(today, q.fix.account.collective, q.cond))
+        .ftsCondition(i, ftsTable)
         .groupBy(GroupBy(cf.all))

     val basicFields = Nel.of(
@@ -498,7 +554,7 @@ object QItem {
       )
     )

-    val from = findItemsBase(q.fix, today, maxNoteLen)
+    val from = findItemsBase(q.fix, today, maxNoteLen, None)
       .appendCte(cte)
       .appendSelect(Tids.weight.s)
       .changeFrom(_.innerJoin(Tids, Tids.itemId === i.id))
@@ -8,7 +8,9 @@ package docspell.store.queries

 import docspell.common._
 import docspell.query.ItemQuery
-import docspell.store.qb.Column
+import docspell.store.fts.RFtsResult
+import docspell.store.qb.DSL._
+import docspell.store.qb.{Column, OrderBy}
 import docspell.store.records.RItem

 case class Query(fix: Query.Fix, cond: Query.QueryCond) {
@@ -16,7 +18,7 @@ case class Query(fix: Query.Fix, cond: Query.QueryCond) {
     copy(cond = f(cond))

   def withOrder(orderAsc: RItem.Table => Column[_]): Query =
-    withFix(_.copy(orderAsc = Some(orderAsc)))
+    withFix(_.copy(order = Some(_.byItemColumnAsc(orderAsc))))

   def withFix(f: Query.Fix => Query.Fix): Query =
     copy(fix = f(fix))
@@ -29,6 +31,19 @@ case class Query(fix: Query.Fix, cond: Query.QueryCond) {
 }

 object Query {
+  trait OrderSelect {
+    def item: RItem.Table
+    def fts: Option[RFtsResult.Table]
+
+    def byDefault: OrderBy =
+      OrderBy.desc(coalesce(item.itemDate.s, item.created.s).s)
+
+    def byItemColumnAsc(f: RItem.Table => Column[_]): OrderBy =
+      OrderBy.asc(coalesce(f(item).s, item.created.s).s)
+
+    def byScore: OrderBy =
+      fts.map(t => OrderBy.desc(t.score.s)).getOrElse(byDefault)
+  }

   def apply(fix: Fix): Query =
     Query(fix, QueryExpr(None))
@@ -36,7 +51,7 @@ object Query {
   case class Fix(
       account: AccountId,
       query: Option[ItemQuery.Expr],
-      orderAsc: Option[RItem.Table => Column[_]]
+      order: Option[OrderSelect => OrderBy]
   ) {

     def isEmpty: Boolean =
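
Ordering becomes a function of `OrderSelect`, so the choice between date and score order is deferred until it is known whether a fulltext table participates in the query. A sketch (the `account` value is illustrative):

// Sketch: order by fulltext score when available, else by date.
val fix = Query.Fix(account, query = None, order = Some(_.byScore))
// fts = Some(t) -> ORDER BY t.score DESC
// fts = None    -> byDefault: ORDER BY coalesce(itemdate, created) DESC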
|
102
modules/store/src/test/scala/docspell/store/DatabaseTest.scala
Normal file
102
modules/store/src/test/scala/docspell/store/DatabaseTest.scala
Normal file
@ -0,0 +1,102 @@
|
|||||||
|
/*
|
||||||
|
* Copyright 2020 Eike K. & Contributors
|
||||||
|
*
|
||||||
|
* SPDX-License-Identifier: AGPL-3.0-or-later
|
||||||
|
*/
|
||||||
|
|
||||||
|
package docspell.store
|
||||||
|
|
||||||
|
import java.util.UUID
|
||||||
|
|
||||||
|
import cats.effect._
|
||||||
|
|
||||||
|
import docspell.common._
|
||||||
|
import docspell.logging.TestLoggingConfig
|
||||||
|
|
||||||
|
import com.dimafeng.testcontainers.munit.fixtures.TestContainersFixtures
|
||||||
|
import com.dimafeng.testcontainers.{
|
||||||
|
JdbcDatabaseContainer,
|
||||||
|
MariaDBContainer,
|
||||||
|
PostgreSQLContainer
|
||||||
|
}
|
||||||
|
import doobie._
|
||||||
|
import munit.CatsEffectSuite
|
||||||
|
import org.testcontainers.utility.DockerImageName
|
||||||
|
|
||||||
|
trait DatabaseTest
|
||||||
|
extends CatsEffectSuite
|
||||||
|
with TestContainersFixtures
|
||||||
|
with TestLoggingConfig {
|
||||||
|
|
||||||
|
val cio: Sync[ConnectionIO] = Sync[ConnectionIO]
|
||||||
|
|
||||||
|
lazy val mariadbCnt = ForAllContainerFixture(
|
||||||
|
MariaDBContainer.Def(DockerImageName.parse("mariadb:10.5")).createContainer()
|
||||||
|
)
|
||||||
|
|
||||||
|
lazy val postgresCnt = ForAllContainerFixture(
|
||||||
|
PostgreSQLContainer.Def(DockerImageName.parse("postgres:14")).createContainer()
|
||||||
|
)
|
||||||
|
|
||||||
|
lazy val pgDataSource = ResourceSuiteLocalFixture(
|
||||||
|
"pgDataSource",
|
||||||
|
DatabaseTest.makeDataSourceFixture(IO(postgresCnt()))
|
||||||
|
)
|
||||||
|
|
||||||
|
lazy val mariaDataSource = ResourceSuiteLocalFixture(
|
||||||
|
"mariaDataSource",
|
||||||
|
DatabaseTest.makeDataSourceFixture(IO(mariadbCnt()))
|
||||||
|
)
|
||||||
|
|
||||||
|
lazy val h2DataSource = ResourceSuiteLocalFixture(
|
||||||
|
"h2DataSource", {
|
||||||
|
val jdbc = StoreFixture.memoryDB("test")
|
||||||
|
StoreFixture.dataSource(jdbc).map(ds => (jdbc, ds))
|
||||||
|
}
|
||||||
|
)
|
||||||
|
|
||||||
|
lazy val newH2DataSource = ResourceFixture(for {
|
||||||
|
jdbc <- Resource.eval(IO(StoreFixture.memoryDB(UUID.randomUUID().toString)))
|
||||||
|
ds <- StoreFixture.dataSource(jdbc)
|
||||||
|
} yield (jdbc, ds))
|
||||||
|
|
||||||
|
lazy val pgStore = ResourceSuiteLocalFixture(
|
||||||
|
"pgStore",
|
||||||
|
for {
|
||||||
|
t <- Resource.eval(IO(pgDataSource()))
|
||||||
|
store <- StoreFixture.store(t._2, t._1)
|
||||||
|
} yield store
|
||||||
|
)
|
||||||
|
|
||||||
|
lazy val mariaStore = ResourceSuiteLocalFixture(
|
||||||
|
"mariaStore",
|
||||||
|
for {
|
||||||
|
t <- Resource.eval(IO(mariaDataSource()))
|
||||||
|
store <- StoreFixture.store(t._2, t._1)
|
||||||
|
} yield store
|
||||||
|
)
|
||||||
|
|
||||||
|
lazy val h2Store = ResourceSuiteLocalFixture(
|
||||||
|
"h2Store",
|
||||||
|
for {
|
||||||
|
t <- Resource.eval(IO(h2DataSource()))
|
||||||
|
store <- StoreFixture.store(t._2, t._1)
|
||||||
|
} yield store
|
||||||
|
)
|
||||||
|
|
||||||
|
def postgresAll = List(postgresCnt, pgDataSource, pgStore)
|
||||||
|
def mariaDbAll = List(mariadbCnt, mariaDataSource, mariaStore)
|
||||||
|
def h2All = List(h2DataSource, h2Store)
|
||||||
|
}
|
||||||
|
|
||||||
|
object DatabaseTest {
|
||||||
|
private def jdbcConfig(cnt: JdbcDatabaseContainer) =
|
||||||
|
JdbcConfig(LenientUri.unsafe(cnt.jdbcUrl), cnt.username, cnt.password)
|
||||||
|
|
||||||
|
private def makeDataSourceFixture(cnt: IO[JdbcDatabaseContainer]) =
|
||||||
|
for {
|
||||||
|
c <- Resource.eval(cnt)
|
||||||
|
jdbc <- Resource.pure(jdbcConfig(c))
|
||||||
|
ds <- StoreFixture.dataSource(jdbc)
|
||||||
|
} yield (jdbc, ds)
|
||||||
|
}
|
@@ -4,7 +4,8 @@
  * SPDX-License-Identifier: AGPL-3.0-or-later
  */

-package docspell.store.migrate
+package docspell.store

 import cats.effect._
 import cats.effect.unsafe.implicits._
@@ -57,29 +57,27 @@ object StoreFixture {

   def dataSource(jdbc: JdbcConfig): Resource[IO, JdbcConnectionPool] = {
     def jdbcConnPool =
-      jdbc.dbmsName match {
-        case Some("mariadb") =>
+      jdbc.dbms match {
+        case Db.MariaDB =>
           val ds = new MariaDbDataSource()
           ds.setUrl(jdbc.url.asString)
           ds.setUser(jdbc.user)
           ds.setPassword(jdbc.password)
           JdbcConnectionPool.create(ds)

-        case Some("postgresql") =>
+        case Db.PostgreSQL =>
           val ds = new PGConnectionPoolDataSource()
           ds.setURL(jdbc.url.asString)
           ds.setUser(jdbc.user)
           ds.setPassword(jdbc.password)
           JdbcConnectionPool.create(ds)

-        case Some("h2") =>
+        case Db.H2 =>
           val ds = new JdbcDataSource()
           ds.setURL(jdbc.url.asString)
           ds.setUser(jdbc.user)
           ds.setPassword(jdbc.password)
           JdbcConnectionPool.create(ds)
-
-        case n => sys.error(s"Unknown db name: $n")
       }

     Resource.make(IO(jdbcConnPool))(cp => IO(cp.dispose()))
@@ -92,8 +90,10 @@ object StoreFixture {
   } yield xa

   def store(jdbc: JdbcConfig): Resource[IO, StoreImpl[IO]] =
+    dataSource(jdbc).flatMap(store(_, jdbc))
+
+  def store(ds: DataSource, jdbc: JdbcConfig): Resource[IO, StoreImpl[IO]] =
     for {
-      ds <- dataSource(jdbc)
       xa <- makeXA(ds)
       cfg = FileRepositoryConfig.Database(64 * 1024)
       fr = FileRepository[IO](xa, ds, cfg, true)
@@ -0,0 +1,198 @@
+/*
+ * Copyright 2020 Eike K. & Contributors
+ *
+ * SPDX-License-Identifier: AGPL-3.0-or-later
+ */
+
+package docspell.store.fts
+
+import java.time.LocalDate
+
+import cats.effect.IO
+import cats.syntax.option._
+import cats.syntax.traverse._
+import fs2.Stream
+
+import docspell.common._
+import docspell.ftsclient.FtsResult
+import docspell.ftsclient.FtsResult.{AttachmentData, ItemMatch}
+import docspell.store._
+import docspell.store.qb.DSL._
+import docspell.store.qb._
+import docspell.store.queries.{QItem, Query}
+import docspell.store.records.{RCollective, RItem}
+
+import doobie._
+
+class TempFtsOpsTest extends DatabaseTest {
+  private[this] val logger = docspell.logging.getLogger[IO]
+
+  override def munitFixtures = postgresAll ++ mariaDbAll ++ h2All
+
+  def id(str: String): Ident = Ident.unsafe(str)
+
+  def stores: (Store[IO], Store[IO], Store[IO]) =
+    (pgStore(), mariaStore(), h2Store())
+
+  test("create temporary table") {
+    val (pg, maria, h2) = stores
+    for {
+      _ <- assertCreateTempTable(pg)
+      _ <- assertCreateTempTable(maria)
+      _ <- assertCreateTempTable(h2)
+    } yield ()
+  }
+
+  test("query items sql") {
+    val (pg, maria, h2) = stores
+    for {
+      _ <- prepareItems(pg)
+      _ <- prepareItems(maria)
+      _ <- prepareItems(h2)
+      _ <- assertQueryItem(pg, ftsResults(10, 10))
+      // _ <- assertQueryItem(pg, ftsResults(3000, 500))
+      _ <- assertQueryItem(maria, ftsResults(10, 10))
+      // _ <- assertQueryItem(maria, ftsResults(3000, 500))
+      _ <- assertQueryItem(h2, ftsResults(10, 10))
+      // _ <- assertQueryItem(h2, ftsResults(3000, 500))
+    } yield ()
+  }
+
+  def prepareItems(store: Store[IO]) =
+    for {
+      _ <- store.transact(RCollective.insert(makeCollective(DocspellSystem.user)))
+      items = (0 until 200)
+        .map(makeItem(_, DocspellSystem.user))
+        .toList
+      _ <- items.traverse(i => store.transact(RItem.insert(i)))
+    } yield ()
+
+  def assertCreateTempTable(store: Store[IO]) = {
+    val insertRows =
+      List(
+        RFtsResult(id("abc-def"), None, None),
+        RFtsResult(id("abc-123"), Some(1.56), None),
+        RFtsResult(id("zyx-321"), None, None)
+      )
+    val create =
+      for {
+        table <- TempFtsOps.createTable(store.dbms, "tt")
+        n <- table.insertAll(insertRows)
+        _ <- table.createIndex
+        rows <- Select(select(table.all), from(table))
+          .orderBy(table.id)
+          .build
+          .query[RFtsResult]
+          .to[List]
+      } yield (n, rows)
+
+    val verify =
+      store.transact(create).map { case (inserted, rows) =>
+        if (store.dbms != Db.MariaDB) {
+          assertEquals(inserted, 3)
+        }
+        assertEquals(rows, insertRows.sortBy(_.id))
+      }
+
+    verify *> verify
+  }
+
+  def assertQueryItem(store: Store[IO], ftsResults: Stream[ConnectionIO, FtsResult]) =
+    for {
+      today <- IO(LocalDate.now())
+      account = DocspellSystem.account
+      tempTable = ftsResults
+        .through(TempFtsOps.prepareTable(store.dbms, "fts_result"))
+        .compile
+        .lastOrError
+      q = Query(Query.Fix(account, None, None), Query.QueryExpr(None))
+      timed <- Duration.stopTime[IO]
+      items <- store
+        .transact(
+          tempTable.flatMap(t =>
+            QItem
+              .queryItems(q, today, 0, Batch.limit(10), t.some)
+              .compile
+              .to(List)
+          )
+        )
+      duration <- timed
+      _ <- logger.info(s"Join took: ${duration.formatExact}")
+    } yield {
+      assert(items.nonEmpty)
+      assert(items.head.context.isDefined)
+    }
+
+  def ftsResult(start: Int, end: Int): FtsResult = {
+    def matchData(n: Int): List[ItemMatch] =
+      List(
+        ItemMatch(
+          id(s"m$n"),
+          id(s"item-$n"),
+          DocspellSystem.user,
+          math.random(),
+          FtsResult.ItemData
+        ),
+        ItemMatch(
+          id(s"m$n-1"),
+          id(s"item-$n"),
+          DocspellSystem.user,
+          math.random(),
+          AttachmentData(id(s"item-$n-attach-1"), "attachment.pdf")
+        )
+      )
+
+    val hl =
+      (start until end)
+        .flatMap(n =>
+          List(
+            id(s"m$n-1") -> List("this *a test* please"),
+            id(s"m$n") -> List("only **items** here")
+          )
+        )
+        .toMap
+
+    FtsResult.empty
+      .copy(
+        count = end,
+        highlight = hl,
+        results = (start until end).toList.flatMap(matchData)
+      )
+  }
+
+  def ftsResults(len: Int, chunkSize: Int): Stream[ConnectionIO, FtsResult] = {
+    val chunks = len / chunkSize
+    Stream.range(0, chunks).map { n =>
+      val start = n * chunkSize
+      val end = start + chunkSize
+      ftsResult(start, end)
+    }
+  }
+
+  def makeCollective(cid: Ident): RCollective =
+    RCollective(cid, CollectiveState.Active, Language.English, true, ts)
+
+  def makeItem(n: Int, cid: Ident): RItem =
+    RItem(
+      id(s"item-$n"),
+      cid,
+      s"item $n",
+      None,
+      "test",
+      Direction.Incoming,
+      ItemState.Created,
+      None,
+      None,
+      None,
+      None,
+      None,
+      None,
+      ts,
+      ts,
+      None,
+      None
+    )
+
+  val ts = Timestamp.ofMillis(1654329963743L)
+}
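The mechanism this new test exercises is the heart of the mixed-search fix: full-text results are streamed into a database-side temp table, which is then handed to the regular item query so the database performs a single joined query. Condensed from `assertQueryItem` above, with the assertions dropped (`ftsResults`, `Query`, etc. are the definitions from the test file):

```scala
// Condensed restatement of the flow in assertQueryItem: load FTS
// results into a temp table, then pass that table to QItem.queryItems.
def searchWithFts(store: Store[IO], q: Query, today: LocalDate) =
  store.transact(
    ftsResults(10, 10)
      .through(TempFtsOps.prepareTable(store.dbms, "fts_result"))
      .compile
      .lastOrError
      .flatMap(table =>
        QItem.queryItems(q, today, 0, Batch.limit(10), table.some).compile.to(List)
      )
  )
```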
@@ -1,49 +0,0 @@
-/*
- * Copyright 2020 Eike K. & Contributors
- *
- * SPDX-License-Identifier: AGPL-3.0-or-later
- */
-
-package docspell.store.migrate
-
-import cats.effect.IO
-import cats.effect.unsafe.implicits._
-
-import docspell.logging.TestLoggingConfig
-import docspell.store.{SchemaMigrateConfig, StoreFixture}
-
-import munit.FunSuite
-
-class H2MigrateTest extends FunSuite with TestLoggingConfig {
-
-  test("h2 empty schema migration") {
-    val jdbc = StoreFixture.memoryDB("h2test")
-    val ds = StoreFixture.dataSource(jdbc)
-    val result =
-      ds.flatMap(StoreFixture.makeXA).use { xa =>
-        FlywayMigrate[IO](jdbc, SchemaMigrateConfig.defaults, xa).run
-      }
-
-    assert(result.unsafeRunSync().migrationsExecuted > 0)
-
-    // a second time to apply fixup migrations
-    assert(result.unsafeRunSync().migrationsExecuted == 0)
-  }
-
-  test("h2 upgrade db from 0.24.0") {
-    val dump = "/docspell-0.24.0-dump-h2-1.24.0-2021-07-13-2307.sql"
-
-    val jdbc = StoreFixture.memoryDB("h2test2")
-    val ds = StoreFixture.dataSource(jdbc)
-
-    ds.use(StoreFixture.restoreH2Dump(dump, _)).unsafeRunSync()
-
-    val result =
-      ds.flatMap(StoreFixture.makeXA).use { xa =>
-        FlywayMigrate[IO](jdbc, SchemaMigrateConfig.defaults, xa).run
-      }
-
-    result.unsafeRunSync()
-    result.unsafeRunSync()
-  }
-}
@@ -1,42 +0,0 @@
-/*
- * Copyright 2020 Eike K. & Contributors
- *
- * SPDX-License-Identifier: AGPL-3.0-or-later
- */
-
-package docspell.store.migrate
-
-import cats.effect._
-import cats.effect.unsafe.implicits._
-
-import docspell.common.LenientUri
-import docspell.logging.TestLoggingConfig
-import docspell.store.{JdbcConfig, SchemaMigrateConfig, StoreFixture}
-
-import com.dimafeng.testcontainers.MariaDBContainer
-import com.dimafeng.testcontainers.munit.TestContainerForAll
-import munit._
-import org.testcontainers.utility.DockerImageName
-
-class MariaDbMigrateTest
-    extends FunSuite
-    with TestContainerForAll
-    with TestLoggingConfig {
-  override val containerDef: MariaDBContainer.Def =
-    MariaDBContainer.Def(DockerImageName.parse("mariadb:10.5"))
-
-  test("mariadb empty schema migration") {
-    assume(Docker.existsUnsafe, "docker doesn't exist!")
-    withContainers { cnt =>
-      val jdbc =
-        JdbcConfig(LenientUri.unsafe(cnt.jdbcUrl), cnt.dbUsername, cnt.dbPassword)
-      val ds = StoreFixture.dataSource(jdbc)
-      val result = ds.flatMap(StoreFixture.makeXA).use { xa =>
-        FlywayMigrate[IO](jdbc, SchemaMigrateConfig.defaults, xa).run
-      }
-      assert(result.unsafeRunSync().migrationsExecuted > 0)
-      // a second time to apply fixup migrations
-      assert(result.unsafeRunSync().migrationsExecuted == 0)
-    }
-  }
-}
@@ -0,0 +1,75 @@
+/*
+ * Copyright 2020 Eike K. & Contributors
+ *
+ * SPDX-License-Identifier: AGPL-3.0-or-later
+ */
+
+package docspell.store.migrate
+
+import cats.effect._
+
+import docspell.store.{DatabaseTest, SchemaMigrateConfig, StoreFixture}
+
+import org.flywaydb.core.api.output.MigrateResult
+
+class MigrateTest extends DatabaseTest {
+
+  // don't register store-Fixture as this would run the migrations already
+  override def munitFixtures =
+    List(postgresCnt, mariadbCnt, pgDataSource, mariaDataSource, h2DataSource)
+
+  test("postgres empty schema migration") {
+    val (jdbc, ds) = pgDataSource()
+    val result =
+      StoreFixture.makeXA(ds).use { xa =>
+        FlywayMigrate[IO](jdbc, SchemaMigrateConfig.defaults, xa).run
+      }
+
+    assertMigrationResult(result)
+  }
+
+  test("mariadb empty schema migration") {
+    val (jdbc, ds) = mariaDataSource()
+    val result =
+      StoreFixture.makeXA(ds).use { xa =>
+        FlywayMigrate[IO](jdbc, SchemaMigrateConfig.defaults, xa).run
+      }
+
+    assertMigrationResult(result)
+  }
+
+  test("h2 empty schema migration") {
+    val (jdbc, ds) = h2DataSource()
+    val result =
+      StoreFixture.makeXA(ds).use { xa =>
+        FlywayMigrate[IO](jdbc, SchemaMigrateConfig.defaults, xa).run
+      }
+
+    assertMigrationResult(result)
+  }
+
+  newH2DataSource.test("h2 upgrade db from 0.24.0") { case (jdbc, ds) =>
+    val dump = "/docspell-0.24.0-dump-h2-1.24.0-2021-07-13-2307.sql"
+    for {
+      _ <- StoreFixture.restoreH2Dump(dump, ds)
+
+      result =
+        StoreFixture.makeXA(ds).use { xa =>
+          FlywayMigrate[IO](jdbc, SchemaMigrateConfig.defaults, xa).run
+        }
+
+      _ <- result
+      _ <- result
+    } yield ()
+  }
+
+  def assertMigrationResult(migrate: IO[MigrateResult]) =
+    for {
+      r1 <- migrate.map(_.migrationsExecuted)
+      // a second time to apply fixup migrations
+      r2 <- migrate.map(_.migrationsExecuted)
+    } yield {
+      assert(r1 > 0)
+      assertEquals(r2, 0)
+    }
+}
@@ -1,45 +0,0 @@
-/*
- * Copyright 2020 Eike K. & Contributors
- *
- * SPDX-License-Identifier: AGPL-3.0-or-later
- */
-
-package docspell.store.migrate
-
-import cats.effect._
-import cats.effect.unsafe.implicits._
-
-import docspell.common.LenientUri
-import docspell.logging.TestLoggingConfig
-import docspell.store.{JdbcConfig, SchemaMigrateConfig, StoreFixture}
-
-import com.dimafeng.testcontainers.PostgreSQLContainer
-import com.dimafeng.testcontainers.munit.TestContainerForAll
-import munit._
-import org.testcontainers.utility.DockerImageName
-
-class PostgresqlMigrateTest
-    extends FunSuite
-    with TestContainerForAll
-    with TestLoggingConfig {
-  override val containerDef: PostgreSQLContainer.Def =
-    PostgreSQLContainer.Def(DockerImageName.parse("postgres:14"))
-
-  test("postgres empty schema migration") {
-    assume(Docker.existsUnsafe, "docker doesn't exist!")
-    withContainers { cnt =>
-      val jdbc =
-        JdbcConfig(LenientUri.unsafe(cnt.jdbcUrl), cnt.username, cnt.password)
-
-      val ds = StoreFixture.dataSource(jdbc)
-      val result =
-        ds.flatMap(StoreFixture.makeXA).use { xa =>
-          FlywayMigrate[IO](jdbc, SchemaMigrateConfig.defaults, xa).run
-        }
-      assert(result.unsafeRunSync().migrationsExecuted > 0)
-
-      // a second time to apply fixup migrations
-      assert(result.unsafeRunSync().migrationsExecuted == 0)
-    }
-  }
-}
@@ -10,7 +10,9 @@ module Data.Items exposing
     , first
     , flatten
     , idSet
+    , isEmpty
     , length
+    , nonEmpty
     , replaceIn
    , unwrapGroups
    )
@@ -23,6 +25,16 @@ import Set exposing (Set)
 import Util.List
 
 
+isEmpty : ItemLightList -> Bool
+isEmpty list =
+    List.all (.items >> List.isEmpty) list.groups
+
+
+nonEmpty : ItemLightList -> Bool
+nonEmpty list =
+    not (isEmpty list)
+
+
 flatten : ItemLightList -> List ItemLight
 flatten list =
     List.concatMap .items list.groups
@@ -209,7 +209,7 @@ update texts bookmarkId lastViewedItemId env msg model =
                     { model
                         | searchInProgress = False
                         , searchOffset = noff
-                        , moreAvailable = list.groups /= []
+                        , moreAvailable = Data.Items.nonEmpty list
                     }
             in
             makeResult env.selectedItems <|
@@ -233,7 +233,7 @@ update texts bookmarkId lastViewedItemId env msg model =
                        | searchInProgress = False
                        , moreInProgress = False
                        , searchOffset = noff
-                       , moreAvailable = list.groups /= []
+                       , moreAvailable = Data.Items.nonEmpty list
                    }
            in
            update texts bookmarkId lastViewedItemId env (ItemCardListMsg (Comp.ItemCardList.AddResults list)) m
@@ -161,11 +161,17 @@ unless one of the following is true:
 ## The Query
 
 The query string for full text search is very powerful. Docspell
-currently supports [Apache SOLR](https://solr.apache.org/) as
-full text search backend, so you may want to have a look at their
-[documentation on query
+currently supports [Apache SOLR](https://solr.apache.org/) and
+[PostgreSQL](https://www.postgresql.org/docs/14/textsearch.html) as
+full text search backends. You may want to have a look at [SOLR's
+documentation on query
 syntax](https://solr.apache.org/guide/8_4/query-syntax-and-parsing.html#query-syntax-and-parsing)
-for a in depth guide.
+for an in-depth guide on how to search with SOLR. PostgreSQL also has
+[documentation](https://www.postgresql.org/docs/14/textsearch-controls.html#TEXTSEARCH-PARSING-QUERIES)
+about parsing queries; Docspell by default uses
+`websearch_to_tsquery`.
+
+Here is a quick overview for SOLR queries:
 
 - Wildcards: `?` matches any single character, `*` matches zero or
   more characters
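For the PostgreSQL backend named in the documentation above, `websearch_to_tsquery` accepts web-style input (quoted phrases, `or`, a leading `-` for negation). A rough sketch of the kind of ranked query this produces server-side, written with doobie as used elsewhere in this codebase; the `docs` table and `fts_vector` column are invented for illustration and are not Docspell's actual schema:

```scala
import doobie._
import doobie.implicits._

// Illustrative only: rank rows against a user query with PostgreSQL
// full text search. websearch_to_tsquery parses the raw user input;
// ts_rank scores each matching tsvector against the parsed query.
def searchDocs(q: String): Query0[(String, Float)] =
  sql"""SELECT id, ts_rank(fts_vector, query) AS rank
        FROM docs, websearch_to_tsquery('english', $q) AS query
        WHERE fts_vector @@ query
        ORDER BY rank DESC
        LIMIT 20""".query[(String, Float)]
```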