Remove unused code (search update)

Author: eikek
Date:   2022-06-05 22:06:22 +02:00
parent c6a9a17f89
commit 7ce6bc2f9d
19 changed files with 228 additions and 1157 deletions

View File

@@ -70,9 +70,6 @@ docspell.server {
# In order to keep this low, a limit can be defined here.
max-note-length = 180
feature-search-2 = true
# This defines whether the classification form in the collective
# settings is displayed or not. If all joex instances have document
# classification disabled, it makes sense to hide its settings from

View File

@@ -19,12 +19,16 @@ import docspell.backend.ops.OUpload.{UploadData, UploadMeta, UploadResult}
import docspell.backend.ops._
import docspell.common._
import docspell.common.syntax.all._
import docspell.ftsclient.FtsResult
import docspell.restapi.model._
import docspell.restserver.conv.Conversions._
import docspell.restserver.http4s.ContentDisposition
import docspell.store.qb.Batch
import docspell.store.queries.{AttachmentLight => QAttachmentLight, IdRefCount}
import docspell.store.queries.{
AttachmentLight => QAttachmentLight,
FieldStats => QFieldStats,
ItemFieldValue => QItemFieldValue,
TagCount => QTagCount,
_
}
import docspell.store.records._
import docspell.store.{AddResult, UpdateResult}
@@ -34,7 +38,7 @@ import org.log4s.Logger
trait Conversions {
def mkSearchStats(sum: OItemSearch.SearchSummary): SearchStats =
def mkSearchStats(sum: SearchSummary): SearchStats =
SearchStats(
sum.count,
mkTagCloud(sum.tags),
@@ -53,7 +57,7 @@ trait Conversions {
def mkFolderStats(fs: docspell.store.queries.FolderCount): FolderStats =
FolderStats(fs.id, fs.name, mkIdName(fs.owner), fs.count)
def mkFieldStats(fs: docspell.store.queries.FieldStats): FieldStats =
def mkFieldStats(fs: QFieldStats): FieldStats =
FieldStats(
fs.field.id,
fs.field.name,
@@ -76,7 +80,7 @@ trait Conversions {
mkTagCloud(d.tags)
)
def mkTagCloud(tags: List[OCollective.TagCount]) =
def mkTagCloud(tags: List[QTagCount]) =
TagCloud(tags.map(tc => TagCount(mkTag(tc.tag), tc.count)))
def mkTagCategoryCloud(tags: List[OCollective.CategoryCount]) =
@@ -144,7 +148,7 @@ trait Conversions {
data.relatedItems.map(mkItemLight).toList
)
def mkItemFieldValue(v: OItemSearch.ItemFieldValue): ItemFieldValue =
def mkItemFieldValue(v: QItemFieldValue): ItemFieldValue =
ItemFieldValue(v.fieldId, v.fieldName, v.fieldLabel, v.fieldType, v.value)
def mkAttachment(
@@ -173,28 +177,13 @@ trait Conversions {
OItemSearch.CustomValue(v.field, v.value)
def mkItemList(
v: Vector[OItemSearch.ListItem],
v: Vector[ListItem],
batch: Batch,
capped: Boolean
): ItemLightList = {
val groups = v.groupBy(item => item.date.toUtcDate.toString.substring(0, 7))
def mkGroup(g: (String, Vector[OItemSearch.ListItem])): ItemLightGroup =
ItemLightGroup(g._1, g._2.map(mkItemLight).toList)
val gs =
groups.map(mkGroup).toList.sortWith((g1, g2) => g1.name.compareTo(g2.name) >= 0)
ItemLightList(gs, batch.limit, batch.offset, capped)
}
def mkItemListFts(
v: Vector[OFulltext.FtsItem],
batch: Batch,
capped: Boolean
): ItemLightList = {
val groups = v.groupBy(item => item.item.date.toUtcDate.toString.substring(0, 7))
def mkGroup(g: (String, Vector[OFulltext.FtsItem])): ItemLightGroup =
def mkGroup(g: (String, Vector[ListItem])): ItemLightGroup =
ItemLightGroup(g._1, g._2.map(mkItemLight).toList)
val gs =
@@ -203,13 +192,13 @@ trait Conversions {
}
def mkItemListWithTags(
v: Vector[OItemSearch.ListItemWithTags],
v: Vector[ListItemWithTags],
batch: Batch,
capped: Boolean
): ItemLightList = {
val groups = v.groupBy(ti => ti.item.date.toUtcDate.toString.substring(0, 7))
def mkGroup(g: (String, Vector[OItemSearch.ListItemWithTags])): ItemLightGroup =
def mkGroup(g: (String, Vector[ListItemWithTags])): ItemLightGroup =
ItemLightGroup(g._1, g._2.map(mkItemLightWithTags).toList)
val gs =
@@ -217,50 +206,7 @@ trait Conversions {
ItemLightList(gs, batch.limit, batch.offset, capped)
}
def mkItemListWithTagsFts(
v: Vector[OFulltext.FtsItemWithTags],
batch: Batch,
capped: Boolean
): ItemLightList = {
val groups = v.groupBy(ti => ti.item.item.date.toUtcDate.toString.substring(0, 7))
def mkGroup(g: (String, Vector[OFulltext.FtsItemWithTags])): ItemLightGroup =
ItemLightGroup(g._1, g._2.map(mkItemLightWithTags).toList)
val gs =
groups.map(mkGroup).toList.sortWith((g1, g2) => g1.name.compareTo(g2.name) >= 0)
ItemLightList(gs, batch.limit, batch.offset, capped)
}
def mkItemListWithTagsFtsPlain(
v: Vector[OFulltext.FtsItemWithTags],
batch: Batch,
capped: Boolean
): ItemLightList =
if (v.isEmpty) ItemLightList(Nil, batch.limit, batch.offset, capped)
else
ItemLightList(
List(ItemLightGroup("Results", v.map(mkItemLightWithTags).toList)),
batch.limit,
batch.offset,
capped
)
def mkItemListFtsPlain(
v: Vector[OFulltext.FtsItem],
batch: Batch,
capped: Boolean
): ItemLightList =
if (v.isEmpty) ItemLightList(Nil, batch.limit, batch.offset, capped)
else
ItemLightList(
List(ItemLightGroup("Results", v.map(mkItemLight).toList)),
batch.limit,
batch.offset,
capped
)
def mkItemLight(i: OItemSearch.ListItem): ItemLight =
def mkItemLight(i: ListItem): ItemLight =
ItemLight(
i.id,
i.name,
@@ -282,13 +228,7 @@ trait Conversions {
Nil // highlight
)
def mkItemLight(i: OFulltext.FtsItem): ItemLight = {
val il = mkItemLight(i.item)
val highlight = mkHighlight(i.ftsData)
il.copy(highlighting = highlight)
}
def mkItemLightWithTags(i: OItemSearch.ListItemWithTags): ItemLight =
def mkItemLightWithTags(i: ListItemWithTags): ItemLight =
mkItemLight(i.item)
.copy(
tags = i.tags.map(mkTag),
@@ -300,22 +240,6 @@ trait Conversions {
def mkAttachmentLight(qa: QAttachmentLight): AttachmentLight =
AttachmentLight(qa.id, qa.position, qa.name, qa.pageCount)
def mkItemLightWithTags(i: OFulltext.FtsItemWithTags): ItemLight = {
val il = mkItemLightWithTags(i.item)
val highlight = mkHighlight(i.ftsData)
il.copy(highlighting = highlight)
}
private def mkHighlight(ftsData: OFulltext.FtsData): List[HighlightEntry] =
ftsData.items.filter(_.context.nonEmpty).sortBy(-_.score).map { fdi =>
fdi.matchData match {
case FtsResult.AttachmentData(_, aName) =>
HighlightEntry(aName, fdi.context)
case FtsResult.ItemData =>
HighlightEntry("Item", fdi.context)
}
}
// job
def mkJobQueueState(state: OJob.CollectiveQueueState): JobQueueState = {
def desc(f: JobDetail => Option[Timestamp])(j1: JobDetail, j2: JobDetail): Boolean = {
@@ -571,7 +495,7 @@ trait Conversions {
oid: Option[Ident],
pid: Option[Ident]
): F[RContact] =
timeId.map { case (id, now) =>
Conversions.timeId.map { case (id, now) =>
RContact(id, c.value.trim, c.kind, pid, oid, now)
}
@@ -590,7 +514,7 @@ trait Conversions {
)
def newUser[F[_]: Sync](u: User, cid: Ident): F[RUser] =
timeId.map { case (id, now) =>
Conversions.timeId.map { case (id, now) =>
RUser(
id,
u.login,
@@ -625,7 +549,7 @@ trait Conversions {
Tag(rt.tagId, rt.name, rt.category, rt.created)
def newTag[F[_]: Sync](t: Tag, cid: Ident): F[RTag] =
timeId.map { case (id, now) =>
Conversions.timeId.map { case (id, now) =>
RTag(id, cid, t.name.trim, t.category.map(_.trim), now)
}
@@ -653,7 +577,7 @@ trait Conversions {
)
def newSource[F[_]: Sync](s: Source, cid: Ident): F[RSource] =
timeId.map { case (id, now) =>
Conversions.timeId.map { case (id, now) =>
RSource(
id,
cid,
@@ -691,7 +615,7 @@ trait Conversions {
Equipment(re.eid, re.name, re.created, re.notes, re.use)
def newEquipment[F[_]: Sync](e: Equipment, cid: Ident): F[REquipment] =
timeId.map { case (id, now) =>
Conversions.timeId.map { case (id, now) =>
REquipment(id, cid, e.name.trim, now, now, e.notes, e.use)
}
@@ -785,7 +709,7 @@ trait Conversions {
header.mediaType.mainType,
header.mediaType.subType,
None
).withCharsetName(header.mediaType.extensions.get("charset").getOrElse("unknown"))
).withCharsetName(header.mediaType.extensions.getOrElse("charset", "unknown"))
}
object Conversions extends Conversions {
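
The grouping helpers kept above (mkItemList, mkItemListWithTags) key items by the first seven characters of the item date, i.e. the yyyy-MM month, and sort the groups newest first. A minimal, self-contained sketch of that idea, using simplified stand-in types rather than the real docspell store records (the real code takes the month from item.date.toUtcDate):

import java.time.LocalDate

// Simplified stand-ins for the store's ListItem / ItemLightGroup types.
final case class MiniItem(id: String, name: String, date: LocalDate)
final case class MiniGroup(month: String, items: List[MiniItem])

object MonthGroupingSketch {
  // Same idea as mkItemList above: key by "yyyy-MM", sort groups descending.
  def group(items: Vector[MiniItem]): List[MiniGroup] =
    items
      .groupBy(_.date.toString.substring(0, 7))
      .map { case (month, is) => MiniGroup(month, is.toList) }
      .toList
      .sortWith((g1, g2) => g1.month.compareTo(g2.month) >= 0)

  def main(args: Array[String]): Unit = {
    val items = Vector(
      MiniItem("a", "invoice", LocalDate.of(2022, 5, 3)),
      MiniItem("b", "receipt", LocalDate.of(2022, 6, 1))
    )
    // prints List(2022-06, 2022-05): newest month first
    println(group(items).map(_.month))
  }
}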

View File

@@ -13,12 +13,7 @@ import cats.implicits._
import docspell.backend.BackendApp
import docspell.backend.auth.AuthToken
import docspell.backend.ops.OCustomFields.{RemoveValue, SetValue}
import docspell.backend.ops.OItemSearch.{Batch, Query}
import docspell.backend.ops.OSimpleSearch
import docspell.backend.ops.OSimpleSearch.StringSearchResult
import docspell.common._
import docspell.query.FulltextExtract.Result.TooMany
import docspell.query.FulltextExtract.Result.UnsupportedPosition
import docspell.restapi.model._
import docspell.restserver.Config
import docspell.restserver.conv.Conversions
@@ -27,11 +22,11 @@ import docspell.restserver.http4s.ClientRequestInfo
import docspell.restserver.http4s.Responses
import docspell.restserver.http4s.{QueryParam => QP}
import org.http4s.HttpRoutes
import org.http4s.circe.CirceEntityDecoder._
import org.http4s.circe.CirceEntityEncoder._
import org.http4s.dsl.Http4sDsl
import org.http4s.headers._
import org.http4s.{HttpRoutes, Response}
object ItemRoutes {
def apply[F[_]: Async](
@@ -40,75 +35,12 @@ object ItemRoutes {
user: AuthToken
): HttpRoutes[F] = {
val logger = docspell.logging.getLogger[F]
val searchPart = ItemSearchPart[F](backend, cfg, user)
val searchPart = ItemSearchPart[F](backend.search, cfg, user)
val dsl = new Http4sDsl[F] {}
import dsl._
searchPart.routes <+>
HttpRoutes.of {
case GET -> Root / "search" :? QP.Query(q) :? QP.Limit(limit) :? QP.Offset(
offset
) :? QP.WithDetails(detailFlag) :? QP.SearchKind(searchMode) =>
val batch = Batch(offset.getOrElse(0), limit.getOrElse(cfg.maxItemPageSize))
.restrictLimitTo(cfg.maxItemPageSize)
val limitCapped = limit.exists(_ > cfg.maxItemPageSize)
val itemQuery = ItemQueryString(q)
val settings = OSimpleSearch.Settings(
batch,
cfg.fullTextSearch.enabled,
detailFlag.getOrElse(false),
cfg.maxNoteLength,
searchMode.getOrElse(SearchMode.Normal)
)
val fixQuery = Query.Fix(user.account, None, None)
searchItems(backend, dsl)(settings, fixQuery, itemQuery, limitCapped)
case GET -> Root / "searchStats" :? QP.Query(q) :? QP.SearchKind(searchMode) =>
val itemQuery = ItemQueryString(q)
val fixQuery = Query.Fix(user.account, None, None)
val settings = OSimpleSearch.StatsSettings(
useFTS = cfg.fullTextSearch.enabled,
searchMode = searchMode.getOrElse(SearchMode.Normal)
)
searchItemStats(backend, dsl)(settings, fixQuery, itemQuery)
case req @ POST -> Root / "search" =>
for {
timed <- Duration.stopTime[F]
userQuery <- req.as[ItemQuery]
batch = Batch(
userQuery.offset.getOrElse(0),
userQuery.limit.getOrElse(cfg.maxItemPageSize)
).restrictLimitTo(
cfg.maxItemPageSize
)
limitCapped = userQuery.limit.exists(_ > cfg.maxItemPageSize)
itemQuery = ItemQueryString(userQuery.query)
settings = OSimpleSearch.Settings(
batch,
cfg.fullTextSearch.enabled,
userQuery.withDetails.getOrElse(false),
cfg.maxNoteLength,
searchMode = userQuery.searchMode.getOrElse(SearchMode.Normal)
)
fixQuery = Query.Fix(user.account, None, None)
resp <- searchItems(backend, dsl)(settings, fixQuery, itemQuery, limitCapped)
dur <- timed
_ <- logger.debug(s"Search request: ${dur.formatExact}")
} yield resp
case req @ POST -> Root / "searchStats" =>
for {
userQuery <- req.as[ItemQuery]
itemQuery = ItemQueryString(userQuery.query)
fixQuery = Query.Fix(user.account, None, None)
settings = OSimpleSearch.StatsSettings(
useFTS = cfg.fullTextSearch.enabled,
searchMode = userQuery.searchMode.getOrElse(SearchMode.Normal)
)
resp <- searchItemStats(backend, dsl)(settings, fixQuery, itemQuery)
} yield resp
case GET -> Root / Ident(id) =>
for {
item <- backend.itemSearch.findItem(id, user.account.collective)
@@ -412,82 +344,6 @@ object ItemRoutes {
}
}
def searchItems[F[_]: Sync](
backend: BackendApp[F],
dsl: Http4sDsl[F]
)(
settings: OSimpleSearch.Settings,
fixQuery: Query.Fix,
itemQuery: ItemQueryString,
limitCapped: Boolean
): F[Response[F]] = {
import dsl._
def convertFts(res: OSimpleSearch.Items.FtsItems): ItemLightList =
if (res.indexOnly)
Conversions.mkItemListFtsPlain(res.items, settings.batch, limitCapped)
else Conversions.mkItemListFts(res.items, settings.batch, limitCapped)
def convertFtsFull(res: OSimpleSearch.Items.FtsItemsFull): ItemLightList =
if (res.indexOnly)
Conversions.mkItemListWithTagsFtsPlain(res.items, settings.batch, limitCapped)
else Conversions.mkItemListWithTagsFts(res.items, settings.batch, limitCapped)
backend.simpleSearch
.searchByString(settings)(fixQuery, itemQuery)
.flatMap {
case StringSearchResult.Success(items) =>
Ok(
items.fold(
convertFts,
convertFtsFull,
els => Conversions.mkItemList(els, settings.batch, limitCapped),
els => Conversions.mkItemListWithTags(els, settings.batch, limitCapped)
)
)
case StringSearchResult.FulltextMismatch(TooMany) =>
BadRequest(BasicResult(false, "Only one fulltext search term is allowed."))
case StringSearchResult.FulltextMismatch(UnsupportedPosition) =>
BadRequest(
BasicResult(
false,
"Fulltext search must be in root position or inside the first AND."
)
)
case StringSearchResult.ParseFailed(pf) =>
BadRequest(BasicResult(false, s"Error reading query: ${pf.render}"))
}
}
def searchItemStats[F[_]: Sync](
backend: BackendApp[F],
dsl: Http4sDsl[F]
)(
settings: OSimpleSearch.StatsSettings,
fixQuery: Query.Fix,
itemQuery: ItemQueryString
): F[Response[F]] = {
import dsl._
backend.simpleSearch
.searchSummaryByString(settings)(fixQuery, itemQuery)
.flatMap {
case StringSearchResult.Success(summary) =>
Ok(Conversions.mkSearchStats(summary))
case StringSearchResult.FulltextMismatch(TooMany) =>
BadRequest(BasicResult(false, "Only one fulltext search term is allowed."))
case StringSearchResult.FulltextMismatch(UnsupportedPosition) =>
BadRequest(
BasicResult(
false,
"Fulltext search must be in root position or inside the first AND."
)
)
case StringSearchResult.ParseFailed(pf) =>
BadRequest(BasicResult(false, s"Error reading query: ${pf.render}"))
}
}
implicit final class OptionString(opt: Option[String]) {
def notEmpty: Option[String] =
opt.map(_.trim).filter(_.nonEmpty)
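
The key structural change in this file is that ItemRoutes now only mounts the extracted search routes in front of its own, via searchPart.routes <+> HttpRoutes.of {...}. The <+> comes from cats' SemigroupK syntax: the first route set is tried and unmatched requests fall through to the second. A small sketch of that composition, with illustrative paths and handlers rather than docspell's real endpoints:

import cats.effect.IO
import cats.implicits._
import org.http4s._
import org.http4s.dsl.io._

object RouteCompositionSketch {
  // Stand-in for searchPart.routes
  val searchRoutes: HttpRoutes[IO] = HttpRoutes.of[IO] {
    case GET -> Root / "search" => Ok("handled by the search part")
  }

  // Stand-in for the remaining item routes
  val itemRoutes: HttpRoutes[IO] = HttpRoutes.of[IO] {
    case GET -> Root / "items" / id => Ok(s"item $id handled by ItemRoutes")
  }

  // Requests are offered to searchRoutes first; unmatched ones fall through.
  val combined: HttpRoutes[IO] = searchRoutes <+> itemRoutes
}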

View File

@@ -11,26 +11,30 @@ import java.time.LocalDate
import cats.effect._
import cats.syntax.all._
import docspell.backend.BackendApp
import docspell.backend.auth.AuthToken
import docspell.backend.ops.search.QueryParseResult
import docspell.common.{Duration, SearchMode, Timestamp}
import docspell.backend.ops.OShare
import docspell.backend.ops.OShare.ShareQuery
import docspell.backend.ops.search.{OSearch, QueryParseResult}
import docspell.common._
import docspell.query.FulltextExtract.Result
import docspell.restapi.model._
import docspell.restserver.Config
import docspell.restserver.conv.Conversions
import docspell.restserver.http4s.{QueryParam => QP}
import docspell.store.qb.Batch
import docspell.store.queries.ListItemWithTags
import docspell.store.queries.{ListItemWithTags, SearchSummary}
import org.http4s.circe.CirceEntityCodec._
import org.http4s.dsl.Http4sDsl
import org.http4s.{HttpRoutes, Response}
final class ItemSearchPart[F[_]: Async](
backend: BackendApp[F],
searchOps: OSearch[F],
cfg: Config,
authToken: AuthToken
parseQuery: (SearchMode, String) => QueryParseResult,
changeSummary: SearchSummary => SearchSummary = identity,
searchPath: String = "search",
searchStatsPath: String = "searchStats"
) extends Http4sDsl[F] {
private[this] val logger = docspell.logging.getLogger[F]
@@ -39,9 +43,9 @@ final class ItemSearchPart[F[_]: Async](
if (!cfg.featureSearch2) HttpRoutes.empty
else
HttpRoutes.of {
case GET -> Root / "search" :? QP.Query(q) :? QP.Limit(limit) :? QP.Offset(
offset
) :? QP.WithDetails(detailFlag) :? QP.SearchKind(searchMode) =>
case GET -> Root / `searchPath` :? QP.Query(q) :? QP.Limit(limit) :?
QP.Offset(offset) :? QP.WithDetails(detailFlag) :?
QP.SearchKind(searchMode) =>
val userQuery =
ItemQuery(offset, limit, detailFlag, searchMode, q.getOrElse(""))
for {
@@ -49,7 +53,7 @@ final class ItemSearchPart[F[_]: Async](
resp <- search(userQuery, today)
} yield resp
case req @ POST -> Root / "search" =>
case req @ POST -> Root / `searchPath` =>
for {
timed <- Duration.stopTime[F]
userQuery <- req.as[ItemQuery]
@@ -59,14 +63,15 @@ final class ItemSearchPart[F[_]: Async](
_ <- logger.debug(s"Search request: ${dur.formatExact}")
} yield resp
case GET -> Root / "searchStats" :? QP.Query(q) :? QP.SearchKind(searchMode) =>
case GET -> Root / `searchStatsPath` :? QP.Query(q) :?
QP.SearchKind(searchMode) =>
val userQuery = ItemQuery(None, None, None, searchMode, q.getOrElse(""))
for {
today <- Timestamp.current[F].map(_.toUtcDate)
resp <- searchStats(userQuery, today)
} yield resp
case req @ POST -> Root / "searchStats" =>
case req @ POST -> Root / `searchStatsPath` =>
for {
timed <- Duration.stopTime[F]
userQuery <- req.as[ItemQuery]
@@ -84,8 +89,8 @@ final class ItemSearchPart[F[_]: Async](
identity,
res =>
for {
summary <- backend.search.searchSummary(today)(res.q, res.ftq)
resp <- Ok(Conversions.mkSearchStats(summary))
summary <- searchOps.searchSummary(today.some)(res.q, res.ftq)
resp <- Ok(Conversions.mkSearchStats(changeSummary(summary)))
} yield resp
)
}
@@ -103,8 +108,9 @@ final class ItemSearchPart[F[_]: Async](
identity,
res =>
for {
items <- backend.search
.searchSelect(details, cfg.maxNoteLength, today, batch)(
_ <- logger.warn(s"Searching with query: $res")
items <- searchOps
.searchSelect(details, cfg.maxNoteLength, today.some, batch)(
res.q,
res.ftq
)
@@ -122,29 +128,10 @@ final class ItemSearchPart[F[_]: Async](
userQuery: ItemQuery,
mode: SearchMode
): Either[F[Response[F]], QueryParseResult.Success] =
backend.search.parseQueryString(authToken.account, mode, userQuery.query) match {
case s: QueryParseResult.Success =>
Right(s.withFtsEnabled(cfg.fullTextSearch.enabled))
case QueryParseResult.ParseFailed(err) =>
Left(BadRequest(BasicResult(false, s"Invalid query: $err")))
case QueryParseResult.FulltextMismatch(Result.TooMany) =>
Left(
BadRequest(
BasicResult(false, "Only one fulltext search expression is allowed.")
)
)
case QueryParseResult.FulltextMismatch(Result.UnsupportedPosition) =>
Left(
BadRequest(
BasicResult(
false,
"A fulltext search may only appear in the root and expression."
)
)
)
}
convertParseResult(
parseQuery(mode, userQuery.query)
.map(_.withFtsEnabled(cfg.fullTextSearch.enabled))
)
def convert(
items: Vector[ListItemWithTags],
@@ -202,13 +189,56 @@ final class ItemSearchPart[F[_]: Async](
Nil
}
)
def convertParseResult(
r: QueryParseResult
): Either[F[Response[F]], QueryParseResult.Success] =
r match {
case s: QueryParseResult.Success =>
Right(s)
case QueryParseResult.ParseFailed(err) =>
BadRequest(BasicResult(false, s"Invalid query: $err")).asLeft
case QueryParseResult.FulltextMismatch(Result.TooMany) =>
BadRequest(
BasicResult(false, "Only one fulltext search expression is allowed.")
).asLeft
case QueryParseResult.FulltextMismatch(Result.UnsupportedPosition) =>
BadRequest(
BasicResult(
false,
"A fulltext search may only appear in the root and expression."
)
).asLeft
}
}
object ItemSearchPart {
def apply[F[_]: Async](
backend: BackendApp[F],
search: OSearch[F],
cfg: Config,
token: AuthToken
): ItemSearchPart[F] =
new ItemSearchPart[F](backend, cfg, token)
new ItemSearchPart[F](
search,
cfg,
(m, s) => search.parseQueryString(token.account, m, s)
)
def apply[F[_]: Async](
search: OSearch[F],
share: OShare[F],
cfg: Config,
shareQuery: ShareQuery
): ItemSearchPart[F] =
new ItemSearchPart[F](
search,
cfg,
(_, s) => share.parseQuery(shareQuery, s),
changeSummary = _.onlyExisting,
searchPath = "query",
searchStatsPath = "stats"
)
}
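
The new search and searchStats handlers above lean on a small Either convention: parsedQuery and convertParseResult return either a ready-made error response (Left) or the parsed query (Right), and the callers short-circuit with .fold(identity, ...). A reduced sketch of that flow, with a made-up parse function standing in for the real query parser:

import cats.effect.IO
import org.http4s.Response
import org.http4s.dsl.io._

object EitherResponseSketch {
  // Stand-in for the parsed query carried by QueryParseResult.Success.
  final case class Parsed(query: String)

  // Left: an error response ready to return; Right: the parsed query.
  def parse(q: String): Either[IO[Response[IO]], Parsed] =
    if (q.trim.isEmpty) Left(BadRequest("Invalid query: empty"))
    else Right(Parsed(q))

  // Mirrors `parsedQuery(...).fold(identity, res => ...)` in the routes above.
  def search(q: String): IO[Response[IO]] =
    parse(q).fold(identity, parsed => Ok(s"searching: ${parsed.query}"))
}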

View File

@@ -6,25 +6,14 @@
package docspell.restserver.routes
import cats.data.Kleisli
import cats.effect._
import cats.implicits._
import docspell.backend.BackendApp
import docspell.backend.auth.ShareToken
import docspell.backend.ops.OSimpleSearch
import docspell.backend.ops.OSimpleSearch.StringSearchResult
import docspell.common._
import docspell.query.FulltextExtract.Result.{TooMany, UnsupportedPosition}
import docspell.restapi.model._
import docspell.restserver.Config
import docspell.restserver.conv.Conversions
import docspell.store.qb.Batch
import docspell.store.queries.{Query, SearchSummary}
import org.http4s.circe.CirceEntityDecoder._
import org.http4s.circe.CirceEntityEncoder._
import org.http4s.dsl.Http4sDsl
import org.http4s.{HttpRoutes, Response}
import org.http4s.HttpRoutes
object ShareSearchRoutes {
@@ -32,80 +21,13 @@ object ShareSearchRoutes {
backend: BackendApp[F],
cfg: Config,
token: ShareToken
): HttpRoutes[F] = {
val logger = docspell.logging.getLogger[F]
val dsl = new Http4sDsl[F] {}
import dsl._
HttpRoutes.of {
case req @ POST -> Root / "query" =>
backend.share
.findShareQuery(token.id)
.semiflatMap { share =>
for {
userQuery <- req.as[ItemQuery]
batch = Batch(
userQuery.offset.getOrElse(0),
userQuery.limit.getOrElse(cfg.maxItemPageSize)
).restrictLimitTo(
cfg.maxItemPageSize
)
limitCapped = userQuery.limit.exists(_ > cfg.maxItemPageSize)
itemQuery = ItemQueryString(userQuery.query)
settings = OSimpleSearch.Settings(
batch,
cfg.fullTextSearch.enabled,
userQuery.withDetails.getOrElse(false),
cfg.maxNoteLength,
searchMode = SearchMode.Normal
)
account = share.account
fixQuery = Query.Fix(account, Some(share.query.expr), None)
_ <- logger.debug(s"Searching in share ${share.id.id}: ${userQuery.query}")
resp <- ItemRoutes.searchItems(backend, dsl)(
settings,
fixQuery,
itemQuery,
limitCapped
)
} yield resp
}
.getOrElseF(NotFound())
case req @ POST -> Root / "stats" =>
for {
userQuery <- req.as[ItemQuery]
itemQuery = ItemQueryString(userQuery.query)
settings = OSimpleSearch.StatsSettings(
useFTS = cfg.fullTextSearch.enabled,
searchMode = userQuery.searchMode.getOrElse(SearchMode.Normal)
)
stats <- backend.share.searchSummary(settings)(token.id, itemQuery).value
resp <- stats.map(mkSummaryResponse(dsl)).getOrElse(NotFound())
} yield resp
): HttpRoutes[F] =
Kleisli { req =>
for {
shareQuery <- backend.share.findShareQuery(token.id)
searchPart = ItemSearchPart(backend.search, backend.share, cfg, shareQuery)
routes = searchPart.routes
resp <- routes(req)
} yield resp
}
}
def mkSummaryResponse[F[_]: Sync](
dsl: Http4sDsl[F]
)(r: StringSearchResult[SearchSummary]): F[Response[F]] = {
import dsl._
r match {
case StringSearchResult.Success(summary) =>
Ok(Conversions.mkSearchStats(summary))
case StringSearchResult.FulltextMismatch(TooMany) =>
BadRequest(BasicResult(false, "Fulltext search is not possible in this share."))
case StringSearchResult.FulltextMismatch(UnsupportedPosition) =>
BadRequest(
BasicResult(
false,
"Fulltext search must be in root position or inside the first AND."
)
)
case StringSearchResult.ParseFailed(pf) =>
BadRequest(BasicResult(false, s"Error reading query: ${pf.render}"))
}
}
}
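
ShareSearchRoutes now builds its routes per request: the share query is looked up effectfully, an ItemSearchPart is constructed from it, and the same request is passed on, all inside the Kleisli/OptionT shape of HttpRoutes, so a missing share simply falls through to the usual 404 handling. A rough, self-contained sketch of this per-request pattern; the greeting lookup is a placeholder for backend.share.findShareQuery, not the real API:

import cats.data.{Kleisli, OptionT}
import cats.effect.IO
import org.http4s._
import org.http4s.dsl.io._

object PerRequestRoutesSketch {
  // Placeholder for the effectful lookup; an empty OptionT means "not found".
  def lookupGreeting: OptionT[IO, String] =
    OptionT.liftF(IO.pure("hello"))

  // Placeholder for ItemSearchPart(...).routes built from the lookup result.
  def routesFor(greeting: String): HttpRoutes[IO] =
    HttpRoutes.of[IO] { case GET -> Root / "greet" => Ok(greeting) }

  // Resolve the context, build the routes, then run them on the same request.
  val routes: HttpRoutes[IO] =
    Kleisli { (req: Request[IO]) =>
      for {
        greeting <- lookupGreeting
        resp <- routesFor(greeting).run(req)
      } yield resp
    }
}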