Mirror of https://github.com/TheAnachronism/docspell.git, synced 2025-06-06 15:15:58 +00:00

Commit 8c33658c27: Merge branch 'master' into current-docs
.github/workflows/ci.yml (vendored, 2 lines changed)
@@ -9,7 +9,7 @@ jobs:
       matrix:
         java: [ 'openjdk@1.11' ]
     steps:
-      - uses: actions/checkout@v2.3.4
+      - uses: actions/checkout@v2.3.5
        with:
          fetch-depth: 100
      - uses: jorelali/setup-elm@v3
.github/workflows/docker-image.yml (vendored, 2 lines changed)
@@ -6,7 +6,7 @@ jobs:
   docker-images:
     runs-on: ubuntu-20.04
     steps:
-      - uses: actions/checkout@v2.3.4
+      - uses: actions/checkout@v2.3.5
        with:
          fetch-depth: 0
      - name: Set current version
.github/workflows/release-nightly.yml (vendored, 2 lines changed)
@@ -11,7 +11,7 @@ jobs:
       matrix:
         java: [ 'openjdk@1.11' ]
     steps:
-      - uses: actions/checkout@v2.3.4
+      - uses: actions/checkout@v2.3.5
        with:
          fetch-depth: 0
      - uses: olafurpg/setup-scala@v13
.github/workflows/release.yml (vendored, 2 lines changed)
@@ -11,7 +11,7 @@ jobs:
       matrix:
         java: [ 'openjdk@1.11' ]
     steps:
-      - uses: actions/checkout@v2.3.4
+      - uses: actions/checkout@v2.3.5
        with:
          fetch-depth: 0
      - uses: olafurpg/setup-scala@v13
.github/workflows/website.yml (vendored, 2 lines changed)
@@ -7,7 +7,7 @@ jobs:
   publish-website:
     runs-on: ubuntu-20.04
     steps:
-      - uses: actions/checkout@v2.3.4
+      - uses: actions/checkout@v2.3.5
        with:
          fetch-depth: 0
      - name: Set current version
.gitignore (vendored, 1 line changed)
@@ -14,5 +14,6 @@ _site/
 /website/site/templates/shortcodes/server.conf
 /website/site/templates/shortcodes/sample-exim.conf
 /website/site/templates/shortcodes/joex.conf
+/website/site/templates/shortcodes/config.env.txt
 /docker/docs
 /docker/dev-log
Scalafmt configuration:
@@ -1,7 +1,8 @@
-version = "3.0.4"
+version = "3.0.6"

 preset = default
 align.preset = some
+runner.dialect = scala213

 maxColumn = 90
build.sbt (63 lines changed)
@@ -260,6 +260,18 @@ val openapiScalaSettings = Seq(
             .copy(typeDef =
               TypeDef("AccountSource", Imports("docspell.common.AccountSource"))
             )
+      case "itemquery" =>
+        field =>
+          field
+            .copy(typeDef =
+              TypeDef(
+                "ItemQuery",
+                Imports(
+                  "docspell.query.ItemQuery",
+                  "docspell.restapi.codec.ItemQueryJson._"
+                )
+              )
+            )
     })
 )

@@ -275,15 +287,29 @@ val common = project
   .settings(testSettingsMUnit)
   .settings(
     name := "docspell-common",
+    addCompilerPlugin(Dependencies.kindProjectorPlugin),
     libraryDependencies ++=
       Dependencies.fs2 ++
         Dependencies.circe ++
         Dependencies.loggingApi ++
         Dependencies.calevCore ++
-        Dependencies.calevCirce ++
-        Dependencies.pureconfig.map(_ % "optional")
+        Dependencies.calevCirce
   )

+val config = project
+  .in(file("modules/config"))
+  .disablePlugins(RevolverPlugin)
+  .settings(sharedSettings)
+  .settings(testSettingsMUnit)
+  .settings(
+    name := "docspell-config",
+    addCompilerPlugin(Dependencies.kindProjectorPlugin),
+    libraryDependencies ++=
+      Dependencies.fs2 ++
+        Dependencies.pureconfig
+  )
+  .dependsOn(common)
+
 // Some example files for testing
 // https://file-examples.com/index.php/sample-documents-download/sample-doc-download/
 val files = project

@@ -366,6 +392,7 @@ val store = project
   .settings(testSettingsMUnit)
   .settings(
     name := "docspell-store",
+    addCompilerPlugin(Dependencies.kindProjectorPlugin),
     libraryDependencies ++=
       Dependencies.doobie ++
         Dependencies.binny ++

@@ -409,7 +436,8 @@ val convert = project
     name := "docspell-convert",
     libraryDependencies ++=
       Dependencies.flexmark ++
-        Dependencies.twelvemonkeys
+        Dependencies.twelvemonkeys ++
+        Dependencies.pdfbox
   )
   .dependsOn(common, files % "compile->compile;test->test")

@@ -470,7 +498,7 @@ val restapi = project
     openapiSpec := (Compile / resourceDirectory).value / "docspell-openapi.yml",
     openapiStaticGen := OpenApiDocGenerator.Redoc
   )
-  .dependsOn(common)
+  .dependsOn(common, query.jvm)

 val joexapi = project
   .in(file("modules/joexapi"))

@@ -588,7 +616,17 @@ val joex = project
     ),
     Revolver.enableDebugging(port = 5051, suspend = false)
   )
-  .dependsOn(store, backend, extract, convert, analysis, joexapi, restapi, ftssolr)
+  .dependsOn(
+    config,
+    store,
+    backend,
+    extract,
+    convert,
+    analysis,
+    joexapi,
+    restapi,
+    ftssolr
+  )

 val restserver = project
   .in(file("modules/restserver"))

@@ -651,7 +689,7 @@ val restserver = project
       }
     }
   )
-  .dependsOn(restapi, joexapi, backend, webapp, ftssolr, oidc)
+  .dependsOn(config, restapi, joexapi, backend, webapp, ftssolr, oidc)

 // --- Website Documentation

@@ -671,7 +709,6 @@ val website = project
       val templateOut = baseDirectory.value / "site" / "templates" / "shortcodes"
       val staticOut = baseDirectory.value / "site" / "static" / "openapi"
       IO.createDirectories(Seq(templateOut, staticOut))
-      val logger = streams.value.log

       val files = Seq(
         (restserver / Compile / resourceDirectory).value / "reference.conf" -> templateOut / "server.conf",

@@ -683,6 +720,17 @@ val website = project
       IO.copy(files)
       files.map(_._2)
     }.taskValue,
+    Compile / resourceGenerators += Def.task {
+      val templateOut =
+        baseDirectory.value / "site" / "templates" / "shortcodes" / "config.env.txt"
+      val files = List(
+        (restserver / Compile / resourceDirectory).value / "reference.conf",
+        (joex / Compile / resourceDirectory).value / "reference.conf"
+      )
+      val cfg = EnvConfig.makeConfig(files)
+      EnvConfig.serializeTo(cfg, templateOut)
+      Seq(templateOut)
+    }.taskValue,
     Compile / resourceGenerators += Def.task {
       val changelog = (LocalRootProject / baseDirectory).value / "Changelog.md"
       val targetDir = baseDirectory.value / "site" / "content" / "docs" / "changelog"

@@ -716,6 +764,7 @@ val root = project
   )
   .aggregate(
     common,
+    config,
     extract,
     convert,
     analysis,
Docker Compose services:
@@ -19,6 +19,7 @@ services:
     image: docspell/joex:latest
     container_name: docspell-joex
     command:
+      - -J-Xmx3G
      - /opt/docspell.conf
     restart: unless-stopped
     env_file: ./.env

@@ -50,7 +51,7 @@ services:
       - restserver

   db:
-    image: postgres:13.4
+    image: postgres:14.0
     container_name: postgres_db
     restart: unless-stopped
     volumes:
BackendApp:
@@ -48,6 +48,7 @@ trait BackendApp[F[_]] {
   def simpleSearch: OSimpleSearch[F]
   def clientSettings: OClientSettings[F]
   def totp: OTotp[F]
+  def share: OShare[F]
 }

 object BackendApp {

@@ -85,6 +86,9 @@ object BackendApp {
       customFieldsImpl <- OCustomFields(store)
       simpleSearchImpl = OSimpleSearch(fulltextImpl, itemSearchImpl)
       clientSettingsImpl <- OClientSettings(store)
+      shareImpl <- Resource.pure(
+        OShare(store, itemSearchImpl, simpleSearchImpl, javaEmil)
+      )
     } yield new BackendApp[F] {
       val login = loginImpl
       val signup = signupImpl

@@ -107,16 +111,16 @@ object BackendApp {
       val simpleSearch = simpleSearchImpl
       val clientSettings = clientSettingsImpl
       val totp = totpImpl
+      val share = shareImpl
     }

   def apply[F[_]: Async](
       cfg: Config,
-      connectEC: ExecutionContext,
-      httpClientEc: ExecutionContext
+      connectEC: ExecutionContext
   )(ftsFactory: Client[F] => Resource[F, FtsClient[F]]): Resource[F, BackendApp[F]] =
     for {
       store <- Store.create(cfg.jdbc, cfg.files.chunkSize, connectEC)
-      httpClient <- BlazeClientBuilder[F](httpClientEc).resource
+      httpClient <- BlazeClientBuilder[F].resource
       ftsClient <- ftsFactory(httpClient)
       backend <- create(cfg, store, httpClient, ftsClient)
     } yield backend
New file (52 lines): ShareToken (package docspell.backend.auth)

/*
 * Copyright 2020 Eike K. & Contributors
 *
 * SPDX-License-Identifier: AGPL-3.0-or-later
 */

package docspell.backend.auth

import cats.effect._
import cats.implicits._

import docspell.backend.Common
import docspell.common.{Ident, Timestamp}

import scodec.bits.ByteVector

/** Can be used as an authenticator to access data behind a share. */
final case class ShareToken(created: Timestamp, id: Ident, salt: String, sig: String) {
  def asString = s"${created.toMillis}-${TokenUtil.b64enc(id.id)}-$salt-$sig"

  def sigValid(key: ByteVector): Boolean = {
    val newSig = TokenUtil.sign(this, key)
    TokenUtil.constTimeEq(sig, newSig)
  }

  def sigInvalid(key: ByteVector): Boolean =
    !sigValid(key)
}

object ShareToken {

  def fromString(s: String): Either[String, ShareToken] =
    s.split("-", 4) match {
      case Array(ms, id, salt, sig) =>
        for {
          created <- ms.toLongOption.toRight("Invalid timestamp")
          idStr <- TokenUtil.b64dec(id).toRight("Cannot read authenticator data")
          shareId <- Ident.fromString(idStr)
        } yield ShareToken(Timestamp.ofMillis(created), shareId, salt, sig)

      case _ =>
        Left("Invalid authenticator")
    }

  def create[F[_]: Sync](shareId: Ident, key: ByteVector): F[ShareToken] =
    for {
      now <- Timestamp.current[F]
      salt <- Common.genSaltString[F]
      cd = ShareToken(now, shareId, salt, "")
      sig = TokenUtil.sign(cd, key)
    } yield cd.copy(sig = sig)

}
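A minimal sketch of how such a token might round-trip, assuming the ShareToken API added above plus an Ident and a server key; the function name and variables are illustrative only, not part of the commit:

// Hypothetical round-trip of a ShareToken (sketch, not from the commit).
import cats.effect.IO
import scodec.bits.ByteVector
import docspell.backend.auth.ShareToken
import docspell.common.Ident

def roundTrip(shareId: Ident, key: ByteVector): IO[Boolean] =
  for {
    token <- ShareToken.create[IO](shareId, key)   // sign with the server key
    str = token.asString                           // "millis-b64(id)-salt-sig"
    parsed = ShareToken.fromString(str)            // Either[String, ShareToken]
  } yield parsed.exists(_.sigValid(key))           // true when the signature still checks out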
TokenUtil:
@@ -18,17 +18,24 @@ private[auth] object TokenUtil {

   def sign(cd: RememberToken, key: ByteVector): String = {
     val raw = cd.nowMillis.toString + cd.rememberId.id + cd.salt
-    val mac = Mac.getInstance("HmacSHA1")
-    mac.init(new SecretKeySpec(key.toArray, "HmacSHA1"))
-    ByteVector.view(mac.doFinal(raw.getBytes(utf8))).toBase64
+    signRaw(raw, key)
   }

   def sign(cd: AuthToken, key: ByteVector): String = {
     val raw =
       cd.nowMillis.toString + cd.account.asString + cd.requireSecondFactor + cd.salt
+    signRaw(raw, key)
+  }
+
+  def sign(sd: ShareToken, key: ByteVector): String = {
+    val raw = s"${sd.created.toMillis}${sd.id.id}${sd.salt}"
+    signRaw(raw, key)
+  }
+
+  private def signRaw(data: String, key: ByteVector): String = {
     val mac = Mac.getInstance("HmacSHA1")
     mac.init(new SecretKeySpec(key.toArray, "HmacSHA1"))
-    ByteVector.view(mac.doFinal(raw.getBytes(utf8))).toBase64
+    ByteVector.view(mac.doFinal(data.getBytes(utf8))).toBase64
   }

   def b64enc(s: String): String =
OCollective:
@@ -63,6 +63,12 @@ trait OCollective[F[_]] {

   def findEnabledSource(sourceId: Ident): F[Option[RSource]]

+  def addPassword(collective: Ident, pw: Password): F[Unit]
+
+  def getPasswords(collective: Ident): F[List[RCollectivePassword]]
+
+  def removePassword(id: Ident): F[Unit]
+
   def startLearnClassifier(collective: Ident): F[Unit]

   def startEmptyTrash(args: EmptyTrashArgs): F[Unit]

@@ -149,7 +155,7 @@ object OCollective {
     private def updateLearnClassifierTask(coll: Ident, sett: Settings): F[Unit] =
       for {
         id <- Ident.randomId[F]
-        on = sett.classifier.map(_.enabled).getOrElse(false)
+        on = sett.classifier.exists(_.enabled)
         timer = sett.classifier.map(_.schedule).getOrElse(CalEvent.unsafe(""))
         args = LearnClassifierArgs(coll)
         ut = UserTask(

@@ -174,6 +180,18 @@ object OCollective {
         _ <- joex.notifyAllNodes
       } yield ()

+    def addPassword(collective: Ident, pw: Password): F[Unit] =
+      for {
+        cpass <- RCollectivePassword.createNew[F](collective, pw)
+        _ <- store.transact(RCollectivePassword.upsert(cpass))
+      } yield ()
+
+    def getPasswords(collective: Ident): F[List[RCollectivePassword]] =
+      store.transact(RCollectivePassword.findAll(collective))
+
+    def removePassword(id: Ident): F[Unit] =
+      store.transact(RCollectivePassword.deleteById(id)).map(_ => ())
+
     def startLearnClassifier(collective: Ident): F[Unit] =
       for {
         id <- Ident.randomId[F]
OJoex:
@@ -6,8 +6,6 @@

 package docspell.backend.ops

-import scala.concurrent.ExecutionContext
-
 import cats.data.OptionT
 import cats.effect._
 import cats.implicits._

@@ -42,10 +40,7 @@ object OJoex {
       } yield cancel.success).getOrElse(false)
     })

-  def create[F[_]: Async](
-      ec: ExecutionContext,
-      store: Store[F]
-  ): Resource[F, OJoex[F]] =
-    JoexClient.resource(ec).flatMap(client => apply(client, store))
+  def create[F[_]: Async](store: Store[F]): Resource[F, OJoex[F]] =
+    JoexClient.resource.flatMap(client => apply(client, store))

 }
OMail:
@@ -51,6 +51,22 @@ trait OMail[F[_]] {
 }

 object OMail {
+  sealed trait SendResult
+
+  object SendResult {
+
+    /** Mail was successfully sent and stored to db. */
+    case class Success(id: Ident) extends SendResult
+
+    /** There was a failure sending the mail. The mail is then not saved to db. */
+    case class SendFailure(ex: Throwable) extends SendResult
+
+    /** The mail was successfully sent, but storing to db failed. */
+    case class StoreFailure(ex: Throwable) extends SendResult
+
+    /** Something could not be found required for sending (mail configs, items etc). */
+    case object NotFound extends SendResult
+  }

   case class Sent(
       id: Ident,
New file (381 lines): modules/backend/src/main/scala/docspell/backend/ops/OShare.scala

/*
 * Copyright 2020 Eike K. & Contributors
 *
 * SPDX-License-Identifier: AGPL-3.0-or-later
 */

package docspell.backend.ops

import cats.data.OptionT
import cats.effect._
import cats.implicits._

import docspell.backend.PasswordCrypt
import docspell.backend.auth.ShareToken
import docspell.backend.ops.OItemSearch._
import docspell.backend.ops.OShare._
import docspell.backend.ops.OSimpleSearch.StringSearchResult
import docspell.common._
import docspell.query.ItemQuery
import docspell.query.ItemQuery.Expr
import docspell.query.ItemQuery.Expr.AttachId
import docspell.store.Store
import docspell.store.queries.SearchSummary
import docspell.store.records._

import emil._
import scodec.bits.ByteVector

trait OShare[F[_]] {

  def findAll(
      collective: Ident,
      ownerLogin: Option[Ident],
      query: Option[String]
  ): F[List[ShareData]]

  def delete(id: Ident, collective: Ident): F[Boolean]

  def addNew(share: OShare.NewShare): F[OShare.ChangeResult]

  def findOne(id: Ident, collective: Ident): OptionT[F, ShareData]

  def update(
      id: Ident,
      share: OShare.NewShare,
      removePassword: Boolean
  ): F[OShare.ChangeResult]

  // ---

  /** Verifies the given id and password and returns a authorization token on success. */
  def verify(key: ByteVector)(id: Ident, password: Option[Password]): F[VerifyResult]

  /** Verifies the authorization token. */
  def verifyToken(key: ByteVector)(token: String): F[VerifyResult]

  def findShareQuery(id: Ident): OptionT[F, ShareQuery]

  def findAttachmentPreview(
      attachId: Ident,
      shareId: Ident
  ): OptionT[F, AttachmentPreviewData[F]]

  def findAttachment(attachId: Ident, shareId: Ident): OptionT[F, AttachmentData[F]]

  def findItem(itemId: Ident, shareId: Ident): OptionT[F, ItemData]

  def searchSummary(
      settings: OSimpleSearch.StatsSettings
  )(shareId: Ident, q: ItemQueryString): OptionT[F, StringSearchResult[SearchSummary]]

  def sendMail(account: AccountId, connection: Ident, mail: ShareMail): F[SendResult]
}

object OShare {
  final case class ShareMail(
      shareId: Ident,
      subject: String,
      recipients: List[MailAddress],
      cc: List[MailAddress],
      bcc: List[MailAddress],
      body: String
  )

  sealed trait SendResult
  object SendResult {

    /** Mail was successfully sent and stored to db. */
    case class Success(msgId: String) extends SendResult

    /** There was a failure sending the mail. The mail is then not saved to db. */
    case class SendFailure(ex: Throwable) extends SendResult

    /** Something could not be found required for sending (mail configs, items etc). */
    case object NotFound extends SendResult
  }

  final case class ShareQuery(id: Ident, account: AccountId, query: ItemQuery)

  sealed trait VerifyResult {
    def toEither: Either[String, ShareToken] =
      this match {
        case VerifyResult.Success(token, _) =>
          Right(token)
        case _ => Left("Authentication failed.")
      }
  }
  object VerifyResult {
    case class Success(token: ShareToken, shareName: Option[String]) extends VerifyResult
    case object NotFound extends VerifyResult
    case object PasswordMismatch extends VerifyResult
    case object InvalidToken extends VerifyResult

    def success(token: ShareToken): VerifyResult = Success(token, None)
    def success(token: ShareToken, name: Option[String]): VerifyResult =
      Success(token, name)
    def notFound: VerifyResult = NotFound
    def passwordMismatch: VerifyResult = PasswordMismatch
    def invalidToken: VerifyResult = InvalidToken
  }

  final case class NewShare(
      account: AccountId,
      name: Option[String],
      query: ItemQuery,
      enabled: Boolean,
      password: Option[Password],
      publishUntil: Timestamp
  )

  sealed trait ChangeResult
  object ChangeResult {
    final case class Success(id: Ident) extends ChangeResult
    case object PublishUntilInPast extends ChangeResult
    case object NotFound extends ChangeResult

    def success(id: Ident): ChangeResult = Success(id)
    def publishUntilInPast: ChangeResult = PublishUntilInPast
    def notFound: ChangeResult = NotFound
  }

  final case class ShareData(share: RShare, user: RUser)

  def apply[F[_]: Async](
      store: Store[F],
      itemSearch: OItemSearch[F],
      simpleSearch: OSimpleSearch[F],
      emil: Emil[F]
  ): OShare[F] =
    new OShare[F] {
      private[this] val logger = Logger.log4s[F](org.log4s.getLogger)

      def findAll(
          collective: Ident,
          ownerLogin: Option[Ident],
          query: Option[String]
      ): F[List[ShareData]] =
        store
          .transact(RShare.findAllByCollective(collective, ownerLogin, query))
          .map(_.map(ShareData.tupled))

      def delete(id: Ident, collective: Ident): F[Boolean] =
        store.transact(RShare.deleteByIdAndCid(id, collective)).map(_ > 0)

      def addNew(share: NewShare): F[ChangeResult] =
        for {
          curTime <- Timestamp.current[F]
          id <- Ident.randomId[F]
          user <- store.transact(RUser.findByAccount(share.account))
          pass = share.password.map(PasswordCrypt.crypt)
          record = RShare(
            id,
            user.map(_.uid).getOrElse(Ident.unsafe("-error-no-user-")),
            share.name,
            share.query,
            share.enabled,
            pass,
            curTime,
            share.publishUntil,
            0,
            None
          )
          res <-
            if (share.publishUntil < curTime) ChangeResult.publishUntilInPast.pure[F]
            else store.transact(RShare.insert(record)).map(_ => ChangeResult.success(id))
        } yield res

      def update(
          id: Ident,
          share: OShare.NewShare,
          removePassword: Boolean
      ): F[ChangeResult] =
        for {
          curTime <- Timestamp.current[F]
          user <- store.transact(RUser.findByAccount(share.account))
          record = RShare(
            id,
            user.map(_.uid).getOrElse(Ident.unsafe("-error-no-user-")),
            share.name,
            share.query,
            share.enabled,
            share.password.map(PasswordCrypt.crypt),
            Timestamp.Epoch,
            share.publishUntil,
            0,
            None
          )
          res <-
            if (share.publishUntil < curTime) ChangeResult.publishUntilInPast.pure[F]
            else
              store
                .transact(RShare.updateData(record, removePassword))
                .map(n => if (n > 0) ChangeResult.success(id) else ChangeResult.notFound)
        } yield res

      def findOne(id: Ident, collective: Ident): OptionT[F, ShareData] =
        RShare
          .findOne(id, collective)
          .mapK(store.transform)
          .map(ShareData.tupled)

      def verify(
          key: ByteVector
      )(id: Ident, password: Option[Password]): F[VerifyResult] =
        RShare
          .findCurrentActive(id)
          .mapK(store.transform)
          .semiflatMap { case (share, _) =>
            val pwCheck =
              share.password.map(encPw => password.exists(PasswordCrypt.check(_, encPw)))

            // add the password (if existing) to the server secret key; this way the token
            // invalidates when the user changes the password
            val shareKey =
              share.password.map(pw => key ++ pw.asByteVector).getOrElse(key)

            val token = ShareToken
              .create(id, shareKey)
              .flatTap(_ => store.transact(RShare.incAccess(share.id)))
            pwCheck match {
              case Some(true)  => token.map(t => VerifyResult.success(t, share.name))
              case None        => token.map(t => VerifyResult.success(t, share.name))
              case Some(false) => VerifyResult.passwordMismatch.pure[F]
            }
          }
          .getOrElse(VerifyResult.notFound)

      def verifyToken(key: ByteVector)(token: String): F[VerifyResult] =
        ShareToken.fromString(token) match {
          case Right(st) =>
            RShare
              .findActivePassword(st.id)
              .mapK(store.transform)
              .semiflatMap { password =>
                val shareKey =
                  password.map(pw => key ++ pw.asByteVector).getOrElse(key)
                if (st.sigValid(shareKey)) VerifyResult.success(st).pure[F]
                else
                  logger.info(
                    s"Signature failure for share: ${st.id.id}"
                  ) *> VerifyResult.invalidToken.pure[F]
              }
              .getOrElse(VerifyResult.notFound)

          case Left(err) =>
            logger.debug(s"Invalid session token: $err") *>
              VerifyResult.invalidToken.pure[F]
        }

      def findShareQuery(id: Ident): OptionT[F, ShareQuery] =
        RShare
          .findCurrentActive(id)
          .mapK(store.transform)
          .map { case (share, user) =>
            ShareQuery(share.id, user.accountId, share.query)
          }

      def findAttachmentPreview(
          attachId: Ident,
          shareId: Ident
      ): OptionT[F, AttachmentPreviewData[F]] =
        for {
          sq <- findShareQuery(shareId)
          _ <- checkAttachment(sq, AttachId(attachId.id))
          res <- OptionT(
            itemSearch.findAttachmentPreview(attachId, sq.account.collective)
          )
        } yield res

      def findAttachment(attachId: Ident, shareId: Ident): OptionT[F, AttachmentData[F]] =
        for {
          sq <- findShareQuery(shareId)
          _ <- checkAttachment(sq, AttachId(attachId.id))
          res <- OptionT(itemSearch.findAttachment(attachId, sq.account.collective))
        } yield res

      def findItem(itemId: Ident, shareId: Ident): OptionT[F, ItemData] =
        for {
          sq <- findShareQuery(shareId)
          _ <- checkAttachment(sq, Expr.itemIdEq(itemId.id))
          res <- OptionT(itemSearch.findItem(itemId, sq.account.collective))
        } yield res

      /** Check whether the attachment with the given id is in the results of the given
        * share
        */
      private def checkAttachment(sq: ShareQuery, idExpr: Expr): OptionT[F, Unit] = {
        val checkQuery = Query(
          Query.Fix(sq.account, Some(sq.query.expr), None),
          Query.QueryExpr(idExpr)
        )
        OptionT(
          itemSearch
            .findItems(0)(checkQuery, Batch.limit(1))
            .map(_.headOption.map(_ => ()))
        ).flatTapNone(
          logger.info(
            s"Attempt to load unshared data '$idExpr' via share: ${sq.id.id}"
          )
        )
      }

      def searchSummary(
          settings: OSimpleSearch.StatsSettings
      )(
          shareId: Ident,
          q: ItemQueryString
      ): OptionT[F, StringSearchResult[SearchSummary]] =
        findShareQuery(shareId)
          .semiflatMap { share =>
            val fix = Query.Fix(share.account, Some(share.query.expr), None)
            simpleSearch
              .searchSummaryByString(settings)(fix, q)
              .map {
                case StringSearchResult.Success(summary) =>
                  StringSearchResult.Success(summary.onlyExisting)
                case other => other
              }
          }

      def sendMail(
          account: AccountId,
          connection: Ident,
          mail: ShareMail
      ): F[SendResult] = {
        val getSmtpSettings: OptionT[F, RUserEmail] =
          OptionT(store.transact(RUserEmail.getByName(account, connection)))

        def createMail(sett: RUserEmail): OptionT[F, Mail[F]] = {
          import _root_.emil.builder._

          OptionT.pure(
            MailBuilder.build(
              From(sett.mailFrom),
              Tos(mail.recipients),
              Ccs(mail.cc),
              Bccs(mail.bcc),
              XMailer.emil,
              Subject(mail.subject),
              TextBody[F](mail.body)
            )
          )
        }

        def sendMail(cfg: MailConfig, mail: Mail[F]): F[Either[SendResult, String]] =
          emil(cfg).send(mail).map(_.head).attempt.map(_.left.map(SendResult.SendFailure))

        (for {
          _ <- RShare
            .findCurrentActive(mail.shareId)
            .filter(_._2.cid == account.collective)
            .mapK(store.transform)
          mailCfg <- getSmtpSettings
          mail <- createMail(mailCfg)
          mid <- OptionT.liftF(sendMail(mailCfg.toMailConfig, mail))
          conv = mid.fold(identity, id => SendResult.Success(id))
        } yield conv).getOrElse(SendResult.NotFound)
      }

    }
}
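A rough illustration of the two-step flow this operation enables: first verify an id plus optional password to obtain a token, then validate that token on later requests with verifyToken. Only the OShare API above comes from the commit; the function and variable names here are made up:

// Hypothetical caller of OShare.verify (sketch, not from the commit).
import cats.effect.IO
import scodec.bits.ByteVector
import docspell.backend.ops.OShare
import docspell.common.{Ident, Password}

def openShare(
    oshare: OShare[IO],
    serverKey: ByteVector
)(shareId: Ident, password: Option[Password]): IO[Option[String]] =
  for {
    verified <- oshare.verify(serverKey)(shareId, password)
    token = verified.toEither.toOption.map(_.asString)
    // later requests would pass the token string to oshare.verifyToken(serverKey)(token)
  } yield token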
@ -1,26 +0,0 @@
|
|||||||
/*
|
|
||||||
* Copyright 2020 Eike K. & Contributors
|
|
||||||
*
|
|
||||||
* SPDX-License-Identifier: AGPL-3.0-or-later
|
|
||||||
*/
|
|
||||||
|
|
||||||
package docspell.backend.ops
|
|
||||||
|
|
||||||
import docspell.common._
|
|
||||||
|
|
||||||
sealed trait SendResult
|
|
||||||
|
|
||||||
object SendResult {
|
|
||||||
|
|
||||||
/** Mail was successfully sent and stored to db. */
|
|
||||||
case class Success(id: Ident) extends SendResult
|
|
||||||
|
|
||||||
/** There was a failure sending the mail. The mail is then not saved to db. */
|
|
||||||
case class SendFailure(ex: Throwable) extends SendResult
|
|
||||||
|
|
||||||
/** The mail was successfully sent, but storing to db failed. */
|
|
||||||
case class StoreFailure(ex: Throwable) extends SendResult
|
|
||||||
|
|
||||||
/** Something could not be found required for sending (mail configs, items etc). */
|
|
||||||
case object NotFound extends SendResult
|
|
||||||
}
|
|
AccountId:
@@ -8,6 +8,7 @@ package docspell.common

 import io.circe._

+/** The collective and user name. */
 case class AccountId(collective: Ident, user: Ident) {
   def asString =
     if (collective == user) user.id
Logger:
@@ -7,12 +7,13 @@
 package docspell.common

 import cats.effect.Sync
+import fs2.Stream

 import docspell.common.syntax.all._

 import org.log4s.{Logger => Log4sLogger}

-trait Logger[F[_]] {
+trait Logger[F[_]] { self =>

   def trace(msg: => String): F[Unit]
   def debug(msg: => String): F[Unit]

@@ -21,6 +22,25 @@ trait Logger[F[_]] {
   def error(ex: Throwable)(msg: => String): F[Unit]
   def error(msg: => String): F[Unit]

+  final def s: Logger[Stream[F, *]] = new Logger[Stream[F, *]] {
+    def trace(msg: => String): Stream[F, Unit] =
+      Stream.eval(self.trace(msg))
+
+    def debug(msg: => String): Stream[F, Unit] =
+      Stream.eval(self.debug(msg))
+
+    def info(msg: => String): Stream[F, Unit] =
+      Stream.eval(self.info(msg))
+
+    def warn(msg: => String): Stream[F, Unit] =
+      Stream.eval(self.warn(msg))
+
+    def error(msg: => String): Stream[F, Unit] =
+      Stream.eval(self.error(msg))
+
+    def error(ex: Throwable)(msg: => String): Stream[F, Unit] =
+      Stream.eval(self.error(ex)(msg))
+  }
 }

 object Logger {
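A small sketch of why the stream-valued view is handy inside fs2 pipelines; the pipeline and names below are made up, only Logger and its new s method come from the commit:

// Hypothetical use of the new Logger#s view (sketch, not from the commit).
import cats.effect.IO
import fs2.Stream
import docspell.common.Logger

def loggedLines(logger: Logger[IO], lines: Stream[IO, String]): Stream[IO, String] =
  logger.s.info("starting to read lines").drain ++   // logging expressed as a Stream[IO, *]
    lines.evalTap(l => logger.debug(s"line: $l"))    // the plain F-based methods still work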
Password:
@@ -6,18 +6,29 @@

 package docspell.common

+import java.nio.charset.StandardCharsets
+
 import cats.effect.Sync
 import cats.implicits._

 import io.circe.{Decoder, Encoder}
+import scodec.bits.ByteVector

 final class Password(val pass: String) extends AnyVal {

   def isEmpty: Boolean = pass.isEmpty
+  def nonEmpty: Boolean = pass.nonEmpty
+  def length: Int = pass.length
+
+  def asByteVector: ByteVector =
+    ByteVector.view(pass.getBytes(StandardCharsets.UTF_8))

   override def toString: String =
     if (pass.isEmpty) "<empty>" else "***"
+
+  def compare(other: Password): Boolean =
+    this.pass.zip(other.pass).forall { case (a, b) => a == b } &&
+      this.nonEmpty && this.length == other.length
 }

 object Password {
|
|||||||
|
|
||||||
/** Captures thread pools to use in an application. */
|
/** Captures thread pools to use in an application. */
|
||||||
case class Pools(
|
case class Pools(
|
||||||
connectEC: ExecutionContext,
|
connectEC: ExecutionContext
|
||||||
httpClientEC: ExecutionContext,
|
// httpClientEC: ExecutionContext,
|
||||||
restEC: ExecutionContext
|
// restEC: ExecutionContext
|
||||||
)
|
)
|
||||||
|
@ -51,6 +51,9 @@ case class Timestamp(value: Instant) {
|
|||||||
|
|
||||||
def <(other: Timestamp): Boolean =
|
def <(other: Timestamp): Boolean =
|
||||||
this.value.isBefore(other.value)
|
this.value.isBefore(other.value)
|
||||||
|
|
||||||
|
def >(other: Timestamp): Boolean =
|
||||||
|
this.value.isAfter(other.value)
|
||||||
}
|
}
|
||||||
|
|
||||||
object Timestamp {
|
object Timestamp {
|
||||||
@ -67,6 +70,9 @@ object Timestamp {
|
|||||||
def atUtc(ldt: LocalDateTime): Timestamp =
|
def atUtc(ldt: LocalDateTime): Timestamp =
|
||||||
from(ldt.atZone(UTC))
|
from(ldt.atZone(UTC))
|
||||||
|
|
||||||
|
def ofMillis(ms: Long): Timestamp =
|
||||||
|
Timestamp(Instant.ofEpochMilli(ms))
|
||||||
|
|
||||||
def daysBetween(ts0: Timestamp, ts1: Timestamp): Long =
|
def daysBetween(ts0: Timestamp, ts1: Timestamp): Long =
|
||||||
ChronoUnit.DAYS.between(ts0.toUtcDate, ts1.toUtcDate)
|
ChronoUnit.DAYS.between(ts0.toUtcDate, ts1.toUtcDate)
|
||||||
|
|
||||||
|
New file (108 lines): ConfigFactory (package docspell.config)

/*
 * Copyright 2020 Eike K. & Contributors
 *
 * SPDX-License-Identifier: AGPL-3.0-or-later
 */

package docspell.config

import scala.reflect.ClassTag

import cats.data.OptionT
import cats.effect._
import cats.implicits._
import fs2.io.file.{Files, Path}

import docspell.common.Logger

import pureconfig.{ConfigReader, ConfigSource}

object ConfigFactory {

  /** Reads the configuration trying the following in order:
    *   1. if 'args' contains at least one element, the first is interpreted as a config
    *      file
    *   1. otherwise check the system property 'config.file' for an existing file and use
    *      it if it does exist; ignore if it doesn't exist
    *   1. if no file is found, read the config from environment variables falling back to
    *      the default config
    */
  def default[F[_]: Async, C: ClassTag: ConfigReader](logger: Logger[F], atPath: String)(
      args: List[String]
  ): F[C] =
    findFileFromArgs(args).flatMap {
      case Some(file) =>
        logger.info(s"Using config file: $file") *>
          readFile[F, C](file, atPath)
      case None =>
        checkSystemProperty.value.flatMap {
          case Some(file) =>
            logger.info(s"Using config file from system property: $file") *>
              readConfig(atPath)
          case None =>
            logger.info("Using config from environment variables!") *>
              readEnv(atPath)
        }
    }

  /** Reads the configuration from the given file. */
  private def readFile[F[_]: Sync, C: ClassTag: ConfigReader](
      file: Path,
      at: String
  ): F[C] =
    Sync[F].delay {
      System.setProperty(
        "config.file",
        file.toNioPath.toAbsolutePath.normalize.toString
      )
      ConfigSource.default.at(at).loadOrThrow[C]
    }

  /** Reads the config as specified in typesafe's config library; usually loading the file
    * given as system property 'config.file'.
    */
  private def readConfig[F[_]: Sync, C: ClassTag: ConfigReader](
      at: String
  ): F[C] =
    Sync[F].delay(ConfigSource.default.at(at).loadOrThrow[C])

  /** Reads the configuration from environment variables. */
  private def readEnv[F[_]: Sync, C: ClassTag: ConfigReader](at: String): F[C] =
    Sync[F].delay(ConfigSource.fromConfig(EnvConfig.get).at(at).loadOrThrow[C])

  /** Uses the first argument as a path to the config file. If it is specified but the
    * file doesn't exist, an exception is thrown.
    */
  private def findFileFromArgs[F[_]: Async](args: List[String]): F[Option[Path]] =
    args.headOption
      .map(Path.apply)
      .traverse(p =>
        Files[F].exists(p).flatMap {
          case true  => p.pure[F]
          case false => Async[F].raiseError(new Exception(s"File not found: $p"))
        }
      )

  /** If the system property 'config.file' is set, it is checked whether the file exists.
    * If it doesn't exist, the property is removed to not raise any exception. In contrast
    * to giving the file as argument, it is not an error to specify a non-existing file
    * via a system property.
    */
  private def checkSystemProperty[F[_]: Async]: OptionT[F, Path] =
    for {
      cf <- OptionT(
        Sync[F].delay(
          Option(System.getProperty("config.file")).map(_.trim).filter(_.nonEmpty)
        )
      )
      cp = Path(cf)
      exists <- OptionT.liftF(Files[F].exists(cp))
      file <-
        if (exists) OptionT.pure[F](cp)
        else
          OptionT
            .liftF(Sync[F].delay(System.clearProperty("config.file")))
            .flatMap(_ => OptionT.none[F, Path])
    } yield file

}
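A minimal sketch of how an application entry point might call this factory, assuming a pureconfig-readable case class sitting at the HOCON path "docspell.server"; ServerConfig, its fields, and the automatic derivation import are placeholders for this sketch and are not taken from the commit:

// Hypothetical caller of ConfigFactory.default, following the lookup order documented above.
import cats.effect.{ExitCode, IO, IOApp}
import docspell.common.Logger
import docspell.config.ConfigFactory
import pureconfig.generic.auto._   // assumed derivation for the placeholder case class

// Placeholder config shape; the real applications define their own Config classes.
final case class ServerConfig(appName: String, baseUrl: String)

object Main extends IOApp {
  def run(args: List[String]): IO[ExitCode] = {
    val logger = Logger.log4s[IO](org.log4s.getLogger)
    ConfigFactory
      .default[IO, ServerConfig](logger, "docspell.server")(args) // file arg > config.file > env vars
      .map(cfg => println(cfg.appName))
      .as(ExitCode.Success)
  }
}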
New file (78 lines): EnvConfig (package docspell.config)

/*
 * Copyright 2020 Eike K. & Contributors
 *
 * SPDX-License-Identifier: AGPL-3.0-or-later
 */

package docspell.config

import java.util.Properties

import scala.collection.{MapView, mutable}
import scala.jdk.CollectionConverters._

import com.typesafe.config.{Config, ConfigFactory}

/** Creates a config from environment variables.
  *
  * The env variables are expected to be in same form as the config keys with the
  * following mangling: a dot is replaced by an underscore character, because this is the
  * standard separator for env variables. In order to represent dashes, two underscores
  * are needed (and for one underscore use three underscores in the env variable).
  *
  * For example, the config key
  * {{{
  * docspell.server.app-name
  * }}}
  * can be given as env variable
  * {{{
  * DOCSPELL_SERVER_APP__NAME
  * }}}
  */
object EnvConfig {

  /** The config from current environment. */
  lazy val get: Config =
    loadFrom(System.getenv().asScala.view)

  def loadFrom(env: MapView[String, String]): Config = {
    val cfg = new Properties()
    for (key <- env.keySet if key.startsWith("DOCSPELL_"))
      cfg.setProperty(envToProp(key), env(key))

    ConfigFactory
      .parseProperties(cfg)
      .withFallback(ConfigFactory.defaultReference())
      .resolve()
  }

  /** Docspell has all lowercase key names and uses snake case.
    *
    * So this converts to lowercase and then replaces underscores (like
    * [[com.typesafe.config.ConfigFactory.systemEnvironmentOverrides()]]
    *
    *   - 3 underscores -> `_` (underscore)
    *   - 2 underscores -> `-` (dash)
    *   - 1 underscore -> `.` (dot)
    */
  private[config] def envToProp(v: String): String = {
    val len = v.length
    val buffer = new mutable.StringBuilder()
    val underscoreMapping = Map(3 -> '_', 2 -> '-', 1 -> '.').withDefault(_ => '_')
    @annotation.tailrec
    def go(current: Int, underscores: Int): String =
      if (current >= len) buffer.toString()
      else
        v.charAt(current) match {
          case '_' => go(current + 1, underscores + 1)
          case c =>
            if (underscores > 0) {
              buffer.append(underscoreMapping(underscores))
            }
            buffer.append(c.toLower)
            go(current + 1, 0)
        }

    go(0, 0)
  }
}
Implicits:
@@ -4,7 +4,7 @@
  * SPDX-License-Identifier: AGPL-3.0-or-later
  */

-package docspell.common.config
+package docspell.config

 import java.nio.file.{Path => JPath}

@@ -15,12 +15,11 @@ import fs2.io.file.Path
 import docspell.common._

 import com.github.eikek.calev.CalEvent
-import pureconfig._
+import pureconfig.ConfigReader
 import pureconfig.error.{CannotConvert, FailureReason}
 import scodec.bits.ByteVector

 object Implicits {

   implicit val accountIdReader: ConfigReader[AccountId] =
     ConfigReader[String].emap(reason(AccountId.parse))
New file (5 lines): modules/config/src/test/resources/reference.conf

docspell.server {
  bind {
    port = 7880
  }
}
New file (45 lines): EnvConfigTest (package docspell.config)

/*
 * Copyright 2020 Eike K. & Contributors
 *
 * SPDX-License-Identifier: AGPL-3.0-or-later
 */

package docspell.config

import munit.FunSuite

class EnvConfigTest extends FunSuite {

  test("convert underscores") {
    assertEquals(EnvConfig.envToProp("A_B_C"), "a.b.c")
    assertEquals(EnvConfig.envToProp("A_B__C"), "a.b-c")
    assertEquals(EnvConfig.envToProp("AA_BB__CC___D"), "aa.bb-cc_d")
  }

  test("insert docspell keys") {
    val cfg = EnvConfig.loadFrom(
      Map(
        "DOCSPELL_SERVER_APP__NAME" -> "Hello!",
        "DOCSPELL_JOEX_BIND_PORT" -> "1234"
      ).view
    )

    assertEquals(cfg.getString("docspell.server.app-name"), "Hello!")
    assertEquals(cfg.getInt("docspell.joex.bind.port"), 1234)
  }

  test("find default values from reference.conf") {
    val cfg = EnvConfig.loadFrom(
      Map(
        "DOCSPELL_SERVER_APP__NAME" -> "Hello!",
        "DOCSPELL_JOEX_BIND_PORT" -> "1234"
      ).view
    )
    assertEquals(cfg.getInt("docspell.server.bind.port"), 7880)
  }

  test("discard non docspell keys") {
    val cfg = EnvConfig.loadFrom(Map("A_B_C" -> "12").view)
    assert(!cfg.hasPath("a.b.c"))
  }
}
Conversion:
@@ -33,6 +33,7 @@ object Conversion {
   def create[F[_]: Async](
       cfg: ConvertConfig,
       sanitizeHtml: SanitizeHtml,
+      additionalPasswords: List[Password],
       logger: Logger[F]
   ): Resource[F, Conversion[F]] =
     Resource.pure[F, Conversion[F]](new Conversion[F] {

@@ -42,8 +43,16 @@ object Conversion {
         ): F[A] =
           TikaMimetype.resolve(dataType, in).flatMap {
             case MimeType.PdfMatch(_) =>
+              val allPass = cfg.decryptPdf.passwords ++ additionalPasswords
+              val pdfStream =
+                if (cfg.decryptPdf.enabled) {
+                  logger.s
+                    .debug(s"Trying to read the PDF using ${allPass.size} passwords")
+                    .drain ++
+                    in.through(RemovePdfEncryption(logger, allPass))
+                } else in
               OcrMyPdf
-                .toPDF(cfg.ocrmypdf, lang, cfg.chunkSize, logger)(in, handler)
+                .toPDF(cfg.ocrmypdf, lang, cfg.chunkSize, logger)(pdfStream, handler)

             case MimeType.HtmlMatch(mt) =>
               val cs = mt.charsetOrUtf8
ConvertConfig:
@@ -6,11 +6,13 @@

 package docspell.convert

+import docspell.common.Password
+import docspell.convert.ConvertConfig.DecryptPdf
 import docspell.convert.extern.OcrMyPdfConfig
 import docspell.convert.extern.{TesseractConfig, UnoconvConfig, WkHtmlPdfConfig}
 import docspell.convert.flexmark.MarkdownConfig

-case class ConvertConfig(
+final case class ConvertConfig(
     chunkSize: Int,
     convertedFilenamePart: String,
     maxImageSize: Int,

@@ -18,5 +20,11 @@ case class ConvertConfig(
     wkhtmlpdf: WkHtmlPdfConfig,
     tesseract: TesseractConfig,
     unoconv: UnoconvConfig,
-    ocrmypdf: OcrMyPdfConfig
+    ocrmypdf: OcrMyPdfConfig,
+    decryptPdf: DecryptPdf
 )
+
+object ConvertConfig {
+
+  final case class DecryptPdf(enabled: Boolean, passwords: List[Password])
+}
New file (88 lines): RemovePdfEncryption (package docspell.convert)

/*
 * Copyright 2020 Eike K. & Contributors
 *
 * SPDX-License-Identifier: AGPL-3.0-or-later
 */

package docspell.convert

import java.io.ByteArrayOutputStream

import cats.effect._
import fs2.{Chunk, Pipe, Stream}

import docspell.common._

import org.apache.pdfbox.pdmodel.PDDocument
import org.apache.pdfbox.pdmodel.encryption.InvalidPasswordException

/** Using PDFBox, the incoming pdf is loaded while trying the given passwords. */
object RemovePdfEncryption {

  def apply[F[_]: Sync](
      logger: Logger[F],
      passwords: List[Password]
  ): Pipe[F, Byte, Byte] =
    apply(logger, Stream.emits(passwords))

  def apply[F[_]: Sync](
      logger: Logger[F],
      passwords: Stream[F, Password]
  ): Pipe[F, Byte, Byte] = {
    val pws = passwords.cons1(Password.empty)
    in =>
      pws
        .flatMap(pw => in.through(openPdf[F](logger, pw)))
        .head
        .flatMap { doc =>
          if (doc.isEncrypted) {
            logger.s.debug("Removing protection/encryption from PDF").drain ++
              Stream.eval(Sync[F].delay(doc.setAllSecurityToBeRemoved(true))).drain ++
              toStream[F](doc)
          } else {
            in
          }
        }
        .ifEmpty(
          logger.s
            .info(
              s"None of the passwords helped to read the given PDF!"
            )
            .drain ++ in
        )
  }

  private def openPdf[F[_]: Sync](
      logger: Logger[F],
      pw: Password
  ): Pipe[F, Byte, PDDocument] = {
    def alloc(bytes: Array[Byte]): F[Option[PDDocument]] =
      Sync[F].delay(load(bytes, pw))

    def free(doc: Option[PDDocument]): F[Unit] =
      Sync[F].delay(doc.foreach(_.close()))

    val log =
      if (pw.isEmpty) Stream.empty
      else logger.s.debug(s"Try opening PDF with password: ${pw.pass.take(2)}***").drain

    in =>
      Stream
        .eval(in.compile.to(Array))
        .flatMap(bytes => log ++ Stream.bracket(alloc(bytes))(free))
        .flatMap(opt => opt.map(Stream.emit).getOrElse(Stream.empty))
  }

  private def load(bytes: Array[Byte], pw: Password): Option[PDDocument] =
    try Option(PDDocument.load(bytes, pw.pass))
    catch {
      case _: InvalidPasswordException =>
        None
    }

  private def toStream[F[_]](doc: PDDocument): Stream[F, Byte] = {
    val baos = new ByteArrayOutputStream()
    doc.save(baos)
    Stream.chunk(Chunk.array(baos.toByteArray))
  }
}
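A short sketch of wiring this pipe into an fs2 byte stream. The file path and password below are made up, the Password(...) constructor and the single-argument Files.readAll are assumed to be available; only RemovePdfEncryption, Logger and Password come from the commit:

// Hypothetical usage of the RemovePdfEncryption pipe (sketch, not from the commit).
import cats.effect.IO
import fs2.io.file.{Files, Path}
import docspell.common.{Logger, Password}
import docspell.convert.RemovePdfEncryption

def decryptedPdf(logger: Logger[IO]): fs2.Stream[IO, Byte] =
  Files[IO]
    .readAll(Path("protected.pdf"))                                  // encrypted input (made-up path)
    .through(RemovePdfEncryption(logger, List(Password("secret"))))  // tries the empty password first, then "secret"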
ConversionTest:
@@ -74,11 +74,12 @@ class ConversionTest extends FunSuite with FileChecks {
         Duration.seconds(20)
       ),
       target
-    )
+    ),
+    ConvertConfig.DecryptPdf(true, Nil)
   )

   val conversion =
-    Conversion.create[IO](convertConfig, SanitizeHtml.none, logger)
+    Conversion.create[IO](convertConfig, SanitizeHtml.none, Nil, logger)

   val bombs = List(
     ExampleFiles.bombs_20K_gray_jpeg,
@ -9,6 +9,8 @@ package docspell.convert
|
|||||||
import java.nio.charset.StandardCharsets
|
import java.nio.charset.StandardCharsets
|
||||||
import java.nio.file.Files
|
import java.nio.file.Files
|
||||||
|
|
||||||
|
import scala.util.Try
|
||||||
|
|
||||||
import cats.data.Kleisli
|
import cats.data.Kleisli
|
||||||
import cats.effect.IO
|
import cats.effect.IO
|
||||||
import cats.effect.unsafe.implicits.global
|
import cats.effect.unsafe.implicits.global
|
||||||
@ -19,6 +21,9 @@ import docspell.common._
|
|||||||
import docspell.convert.ConversionResult.Handler
|
import docspell.convert.ConversionResult.Handler
|
||||||
import docspell.files.TikaMimetype
|
import docspell.files.TikaMimetype
|
||||||
|
|
||||||
|
import org.apache.pdfbox.pdmodel.PDDocument
|
||||||
|
import org.apache.pdfbox.pdmodel.encryption.InvalidPasswordException
|
||||||
|
|
||||||
trait FileChecks {
|
trait FileChecks {
|
||||||
|
|
||||||
implicit class FileCheckOps(p: Path) {
|
implicit class FileCheckOps(p: Path) {
|
||||||
@ -34,15 +39,46 @@ trait FileChecks {
|
|||||||
|
|
||||||
def isPlainText: Boolean =
|
def isPlainText: Boolean =
|
||||||
isType(MimeType.text("plain"))
|
isType(MimeType.text("plain"))
|
||||||
|
|
||||||
|
def isUnencryptedPDF: Boolean =
|
||||||
|
Try(PDDocument.load(p.toNioPath.toFile)).map(_.close()).isSuccess
|
||||||
|
}
|
||||||
|
|
||||||
|
implicit class ByteStreamOps(delegate: Stream[IO, Byte]) {
|
||||||
|
def isNonEmpty: IO[Boolean] =
|
||||||
|
delegate.head.compile.last.map(_.isDefined)
|
||||||
|
|
||||||
|
def isType(mime: MimeType): IO[Boolean] =
|
||||||
|
TikaMimetype.detect(delegate, MimeTypeHint.none).map(_ == mime)
|
||||||
|
|
||||||
|
def isPDF: IO[Boolean] =
|
||||||
|
isType(MimeType.pdf)
|
||||||
|
|
||||||
|
def isUnencryptedPDF: IO[Boolean] =
|
||||||
|
delegate.compile
|
||||||
|
.to(Array)
|
||||||
|
.map(PDDocument.load(_))
|
||||||
|
.map(_.close())
|
||||||
|
.map(_ => true)
|
||||||
|
|
||||||
|
def isEncryptedPDF: IO[Boolean] =
|
||||||
|
delegate.compile
|
||||||
|
.to(Array)
|
||||||
|
.map(PDDocument.load(_))
|
||||||
|
.attempt
|
||||||
|
.map(e =>
|
||||||
|
e.fold(
|
||||||
|
_.isInstanceOf[InvalidPasswordException],
|
||||||
|
doc => {
|
||||||
|
doc.close();
|
||||||
|
false
|
||||||
|
}
|
||||||
|
)
|
||||||
|
)
|
||||||
}
|
}
|
||||||
|
|
||||||
def storeFile(file: Path): Pipe[IO, Byte, Path] =
|
def storeFile(file: Path): Pipe[IO, Byte, Path] =
|
||||||
in =>
|
fs2.io.file.Files[IO].writeAll(file).andThen(s => s ++ Stream.emit(file))
|
||||||
Stream
|
|
||||||
.eval(
|
|
||||||
in.compile.to(Array).flatMap(bytes => IO(Files.write(file.toNioPath, bytes)))
|
|
||||||
)
|
|
||||||
.map(p => File.path(p))
|
|
||||||
|
|
||||||
def storePdfHandler(file: Path): Handler[IO, Path] =
|
def storePdfHandler(file: Path): Handler[IO, Path] =
|
||||||
storePdfTxtHandler(file, file.resolveSibling("unexpected.txt")).map(_._1)
|
storePdfTxtHandler(file, file.resolveSibling("unexpected.txt")).map(_._1)
|
||||||
|
@ -0,0 +1,82 @@
|
|||||||
|
/*
|
||||||
|
* Copyright 2020 Eike K. & Contributors
|
||||||
|
*
|
||||||
|
* SPDX-License-Identifier: AGPL-3.0-or-later
|
||||||
|
*/
|
||||||
|
|
||||||
|
package docspell.convert
|
||||||
|
|
||||||
|
import cats.effect.IO
|
||||||
|
import fs2.Stream
|
||||||
|
|
||||||
|
import docspell.common._
|
||||||
|
import docspell.files.ExampleFiles
|
||||||
|
|
||||||
|
import munit.CatsEffectSuite
|
||||||
|
|
||||||
|
class RemovePdfEncryptionTest extends CatsEffectSuite with FileChecks {
|
||||||
|
val logger: Logger[IO] = Logger.log4s(org.log4s.getLogger)
|
||||||
|
|
||||||
|
private val protectedPdf =
|
||||||
|
ExampleFiles.secured_protected_test123_pdf.readURL[IO](16 * 1024)
|
||||||
|
private val encryptedPdf =
|
||||||
|
ExampleFiles.secured_encrypted_test123_pdf.readURL[IO](16 * 1024)
|
||||||
|
private val plainPdf = ExampleFiles.letter_en_pdf.readURL[IO](16 * 1024)
|
||||||
|
|
||||||
|
test("have encrypted pdfs") {
|
||||||
|
for {
|
||||||
|
_ <- assertIO(encryptedPdf.isEncryptedPDF, true)
|
||||||
|
_ <- assertIO(encryptedPdf.isEncryptedPDF, true)
|
||||||
|
} yield ()
|
||||||
|
}
|
||||||
|
|
||||||
|
test("decrypt pdf") {
|
||||||
|
encryptedPdf
|
||||||
|
.through(RemovePdfEncryption(logger, List(Password("test123"))))
|
||||||
|
.isUnencryptedPDF
|
||||||
|
.map(assert(_))
|
||||||
|
}
|
||||||
|
|
||||||
|
test("decrypt pdf with multiple passwords") {
|
||||||
|
encryptedPdf
|
||||||
|
.through(
|
||||||
|
RemovePdfEncryption(
|
||||||
|
logger,
|
||||||
|
List("xy123", "123xy", "test123", "abc123").map(Password(_))
|
||||||
|
)
|
||||||
|
)
|
||||||
|
.isUnencryptedPDF
|
||||||
|
.map(assert(_))
|
||||||
|
}
|
||||||
|
|
||||||
|
test("remove protection") {
|
||||||
|
protectedPdf
|
||||||
|
.through(RemovePdfEncryption(logger, Nil))
|
||||||
|
.isUnencryptedPDF
|
||||||
|
.map(assert(_))
|
||||||
|
}
|
||||||
|
|
||||||
|
test("read unprotected pdf") {
|
||||||
|
plainPdf
|
||||||
|
.through(RemovePdfEncryption(logger, Nil))
|
||||||
|
.isUnencryptedPDF
|
||||||
|
.map(assert(_))
|
||||||
|
}
|
||||||
|
|
||||||
|
test("decrypt with multiple passwords, stop on first") {
|
||||||
|
val passwords: Stream[IO, String] =
|
||||||
|
Stream("test123") ++ Stream.raiseError[IO](new Exception("is not called"))
|
||||||
|
val decrypt = RemovePdfEncryption(logger, passwords.map(Password(_)))
|
||||||
|
encryptedPdf
|
||||||
|
.through(decrypt)
|
||||||
|
.isUnencryptedPDF
|
||||||
|
.map(assert(_))
|
||||||
|
}
|
||||||
|
|
||||||
|
test("return input stream if nothing helps") {
|
||||||
|
encryptedPdf
|
||||||
|
.through(RemovePdfEncryption(logger, List("a", "b").map(Password(_))))
|
||||||
|
.isEncryptedPDF
|
||||||
|
.map(assert(_))
|
||||||
|
}
|
||||||
|
}
|
BIN
modules/files/src/test/resources/secured/encrypted-test123.pdf
Normal file
BIN
modules/files/src/test/resources/secured/encrypted-test123.pdf
Normal file
Binary file not shown.
BIN
modules/files/src/test/resources/secured/protected-test123.pdf
Normal file
BIN
modules/files/src/test/resources/secured/protected-test123.pdf
Normal file
Binary file not shown.
@ -20,14 +20,19 @@ docspell.joex {
|
|||||||
|
|
||||||
# The database connection.
|
# The database connection.
|
||||||
#
|
#
|
||||||
# By default a H2 file-based database is configured. You can provide
|
|
||||||
# a postgresql or mariadb connection here. When using H2 use the
|
|
||||||
# PostgreSQL compatibility mode and AUTO_SERVER feature.
|
|
||||||
#
|
|
||||||
# It must be the same connection as the rest server is using.
|
# It must be the same connection as the rest server is using.
|
||||||
jdbc {
|
jdbc {
|
||||||
|
|
||||||
|
# The JDBC url to the database. By default a H2 file-based
|
||||||
|
# database is configured. You can provide a postgresql or mariadb
|
||||||
|
# connection here. When using H2 use the PostgreSQL compatibility
|
||||||
|
# mode and AUTO_SERVER feature.
|
||||||
url = "jdbc:h2://"${java.io.tmpdir}"/docspell-demo.db;MODE=PostgreSQL;DATABASE_TO_LOWER=TRUE;AUTO_SERVER=TRUE"
|
url = "jdbc:h2://"${java.io.tmpdir}"/docspell-demo.db;MODE=PostgreSQL;DATABASE_TO_LOWER=TRUE;AUTO_SERVER=TRUE"
|
||||||
|
|
||||||
|
# The database user.
|
||||||
user = "sa"
|
user = "sa"
|
||||||
|
|
||||||
|
# The database password.
|
||||||
password = ""
|
password = ""
|
||||||
}
|
}
|
||||||
|
|
||||||
@ -586,6 +591,25 @@ Docpell Update Check
|
|||||||
}
|
}
|
||||||
working-dir = ${java.io.tmpdir}"/docspell-convert"
|
working-dir = ${java.io.tmpdir}"/docspell-convert"
|
||||||
}
|
}
|
||||||
|
|
||||||
|
# Allows to try to decrypt a PDF with encryption or protection. If
|
||||||
|
# enabled, a PDFs encryption or protection will be removed during
|
||||||
|
# conversion.
|
||||||
|
#
|
||||||
|
# For encrypted PDFs, this is necessary to be processed, because
|
||||||
|
# docspell needs to read it. It also requires to specify a
|
||||||
|
# password here. All passwords are tried when reading a PDF.
|
||||||
|
#
|
||||||
|
# This is enabled by default with an empty password list. This
|
||||||
|
# removes protection from PDFs, which is better for processing.
|
||||||
|
#
|
||||||
|
# Passwords can be given here and each collective can maintain
|
||||||
|
# their passwords as well. But if the `enabled` setting below is
|
||||||
|
# `false`, then no attempt at decrypting is done.
|
||||||
|
decrypt-pdf = {
|
||||||
|
enabled = true
|
||||||
|
passwords = []
|
||||||
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
# The same section is also present in the rest-server config. It is
|
# The same section is also present in the rest-server config. It is
|
||||||
|
@ -8,9 +8,12 @@ package docspell.joex
|
|||||||
|
|
||||||
import cats.data.Validated
|
import cats.data.Validated
|
||||||
import cats.data.ValidatedNec
|
import cats.data.ValidatedNec
|
||||||
|
import cats.effect.Async
|
||||||
import cats.implicits._
|
import cats.implicits._
|
||||||
|
|
||||||
import docspell.common.config.Implicits._
|
import docspell.common.Logger
|
||||||
|
import docspell.config.ConfigFactory
|
||||||
|
import docspell.config.Implicits._
|
||||||
import docspell.joex.scheduler.CountingScheme
|
import docspell.joex.scheduler.CountingScheme
|
||||||
|
|
||||||
import emil.MailAddress
|
import emil.MailAddress
|
||||||
@ -22,8 +25,12 @@ import yamusca.imports._
|
|||||||
object ConfigFile {
|
object ConfigFile {
|
||||||
import Implicits._
|
import Implicits._
|
||||||
|
|
||||||
def loadConfig: Config =
|
def loadConfig[F[_]: Async](args: List[String]): F[Config] = {
|
||||||
validOrThrow(ConfigSource.default.at("docspell.joex").loadOrThrow[Config])
|
val logger = Logger.log4s[F](org.log4s.getLogger)
|
||||||
|
ConfigFactory
|
||||||
|
.default[F, Config](logger, "docspell.joex")(args)
|
||||||
|
.map(cfg => validOrThrow(cfg))
|
||||||
|
}
|
||||||
|
|
||||||
private def validOrThrow(cfg: Config): Config =
|
private def validOrThrow(cfg: Config): Config =
|
||||||
validate(cfg).fold(err => sys.error(err.toList.mkString("- ", "\n", "")), identity)
|
validate(cfg).fold(err => sys.error(err.toList.mkString("- ", "\n", "")), identity)
|
||||||
|
@ -116,11 +116,10 @@ object JoexAppImpl {
|
|||||||
def create[F[_]: Async](
|
def create[F[_]: Async](
|
||||||
cfg: Config,
|
cfg: Config,
|
||||||
termSignal: SignallingRef[F, Boolean],
|
termSignal: SignallingRef[F, Boolean],
|
||||||
connectEC: ExecutionContext,
|
connectEC: ExecutionContext
|
||||||
clientEC: ExecutionContext
|
|
||||||
): Resource[F, JoexApp[F]] =
|
): Resource[F, JoexApp[F]] =
|
||||||
for {
|
for {
|
||||||
httpClient <- BlazeClientBuilder[F](clientEC).resource
|
httpClient <- BlazeClientBuilder[F].resource
|
||||||
client = JoexClient(httpClient)
|
client = JoexClient(httpClient)
|
||||||
store <- Store.create(cfg.jdbc, cfg.files.chunkSize, connectEC)
|
store <- Store.create(cfg.jdbc, cfg.files.chunkSize, connectEC)
|
||||||
queue <- JobQueue(store)
|
queue <- JobQueue(store)
|
||||||
|
@ -33,9 +33,7 @@ object JoexServer {
|
|||||||
val app = for {
|
val app = for {
|
||||||
signal <- Resource.eval(SignallingRef[F, Boolean](false))
|
signal <- Resource.eval(SignallingRef[F, Boolean](false))
|
||||||
exitCode <- Resource.eval(Ref[F].of(ExitCode.Success))
|
exitCode <- Resource.eval(Ref[F].of(ExitCode.Success))
|
||||||
joexApp <-
|
joexApp <- JoexAppImpl.create[F](cfg, signal, pools.connectEC)
|
||||||
JoexAppImpl
|
|
||||||
.create[F](cfg, signal, pools.connectEC, pools.httpClientEC)
|
|
||||||
|
|
||||||
httpApp = Router(
|
httpApp = Router(
|
||||||
"/api/info" -> InfoRoutes(cfg),
|
"/api/info" -> InfoRoutes(cfg),
|
||||||
@ -50,7 +48,7 @@ object JoexServer {
|
|||||||
Stream
|
Stream
|
||||||
.resource(app)
|
.resource(app)
|
||||||
.flatMap(app =>
|
.flatMap(app =>
|
||||||
BlazeServerBuilder[F](pools.restEC)
|
BlazeServerBuilder[F]
|
||||||
.bindHttp(cfg.bind.port, cfg.bind.address)
|
.bindHttp(cfg.bind.port, cfg.bind.address)
|
||||||
.withHttpApp(app.httpApp)
|
.withHttpApp(app.httpApp)
|
||||||
.withoutBanner
|
.withoutBanner
|
||||||
|
@ -6,46 +6,23 @@
|
|||||||
|
|
||||||
package docspell.joex
|
package docspell.joex
|
||||||
|
|
||||||
import java.nio.file.{Files, Paths}
|
|
||||||
|
|
||||||
import cats.effect._
|
import cats.effect._
|
||||||
import cats.implicits._
|
|
||||||
|
|
||||||
import docspell.common._
|
import docspell.common._
|
||||||
|
|
||||||
import org.log4s._
|
import org.log4s.getLogger
|
||||||
|
|
||||||
object Main extends IOApp {
|
object Main extends IOApp {
|
||||||
private[this] val logger = getLogger
|
|
||||||
|
|
||||||
val blockingEC =
|
private val logger: Logger[IO] = Logger.log4s[IO](getLogger)
|
||||||
ThreadFactories.cached[IO](ThreadFactories.ofName("docspell-joex-blocking"))
|
|
||||||
val connectEC =
|
private val connectEC =
|
||||||
ThreadFactories.fixed[IO](5, ThreadFactories.ofName("docspell-joex-dbconnect"))
|
ThreadFactories.fixed[IO](5, ThreadFactories.ofName("docspell-joex-dbconnect"))
|
||||||
val restserverEC =
|
|
||||||
ThreadFactories.workSteal[IO](ThreadFactories.ofNameFJ("docspell-joex-server"))
|
|
||||||
|
|
||||||
def run(args: List[String]) = {
|
def run(args: List[String]): IO[ExitCode] =
|
||||||
args match {
|
for {
|
||||||
case file :: Nil =>
|
cfg <- ConfigFile.loadConfig[IO](args)
|
||||||
val path = Paths.get(file).toAbsolutePath.normalize
|
banner = Banner(
|
||||||
logger.info(s"Using given config file: $path")
|
|
||||||
System.setProperty("config.file", file)
|
|
||||||
case _ =>
|
|
||||||
Option(System.getProperty("config.file")) match {
|
|
||||||
case Some(f) if f.nonEmpty =>
|
|
||||||
val path = Paths.get(f).toAbsolutePath.normalize
|
|
||||||
if (!Files.exists(path)) {
|
|
||||||
logger.info(s"Not using config file '$f' because it doesn't exist")
|
|
||||||
System.clearProperty("config.file")
|
|
||||||
} else
|
|
||||||
logger.info(s"Using config file from system properties: $f")
|
|
||||||
case _ =>
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
val cfg = ConfigFile.loadConfig
|
|
||||||
val banner = Banner(
|
|
||||||
"JOEX",
|
"JOEX",
|
||||||
BuildInfo.version,
|
BuildInfo.version,
|
||||||
BuildInfo.gitHeadCommit,
|
BuildInfo.gitHeadCommit,
|
||||||
@ -55,22 +32,19 @@ object Main extends IOApp {
|
|||||||
cfg.baseUrl,
|
cfg.baseUrl,
|
||||||
Some(cfg.fullTextSearch.solr.url).filter(_ => cfg.fullTextSearch.enabled)
|
Some(cfg.fullTextSearch.solr.url).filter(_ => cfg.fullTextSearch.enabled)
|
||||||
)
|
)
|
||||||
logger.info(s"\n${banner.render("***>")}")
|
_ <- logger.info(s"\n${banner.render("***>")}")
|
||||||
|
_ <-
|
||||||
if (EnvMode.current.isDev) {
|
if (EnvMode.current.isDev) {
|
||||||
logger.warn(">>>>> Docspell is running in DEV mode! <<<<<")
|
logger.warn(">>>>> Docspell is running in DEV mode! <<<<<")
|
||||||
}
|
} else IO(())
|
||||||
|
|
||||||
val pools = for {
|
pools = connectEC.map(Pools.apply)
|
||||||
cec <- connectEC
|
rc <- pools.use(p =>
|
||||||
bec <- blockingEC
|
|
||||||
rec <- restserverEC
|
|
||||||
} yield Pools(cec, bec, rec)
|
|
||||||
pools.use(p =>
|
|
||||||
JoexServer
|
JoexServer
|
||||||
.stream[IO](cfg, p)
|
.stream[IO](cfg, p)
|
||||||
.compile
|
.compile
|
||||||
.drain
|
.drain
|
||||||
.as(ExitCode.Success)
|
.as(ExitCode.Success)
|
||||||
)
|
)
|
||||||
}
|
} yield rc
|
||||||
}
|
}
|
||||||
|
@ -26,7 +26,7 @@ object CheckNodesTask {
|
|||||||
for {
|
for {
|
||||||
_ <- ctx.logger.info("Check nodes reachability")
|
_ <- ctx.logger.info("Check nodes reachability")
|
||||||
ec = scala.concurrent.ExecutionContext.global
|
ec = scala.concurrent.ExecutionContext.global
|
||||||
_ <- BlazeClientBuilder[F](ec).resource.use { client =>
|
_ <- BlazeClientBuilder[F].withExecutionContext(ec).resource.use { client =>
|
||||||
checkNodes(ctx, client)
|
checkNodes(ctx, client)
|
||||||
}
|
}
|
||||||
_ <- ctx.logger.info(
|
_ <- ctx.logger.info(
|
||||||
|
@ -77,17 +77,27 @@ object ConvertPdf {
|
|||||||
ctx: Context[F, ProcessItemArgs],
|
ctx: Context[F, ProcessItemArgs],
|
||||||
item: ItemData
|
item: ItemData
|
||||||
)(ra: RAttachment, mime: MimeType): F[(RAttachment, Option[RAttachmentMeta])] =
|
)(ra: RAttachment, mime: MimeType): F[(RAttachment, Option[RAttachmentMeta])] =
|
||||||
Conversion.create[F](cfg, sanitizeHtml, ctx.logger).use { conv =>
|
loadCollectivePasswords(ctx).flatMap(collPass =>
|
||||||
|
Conversion.create[F](cfg, sanitizeHtml, collPass, ctx.logger).use { conv =>
|
||||||
mime match {
|
mime match {
|
||||||
case mt =>
|
case mt =>
|
||||||
val data = ctx.store.fileStore.getBytes(ra.fileId)
|
val data = ctx.store.fileStore.getBytes(ra.fileId)
|
||||||
val handler = conversionHandler[F](ctx, cfg, ra, item)
|
val handler = conversionHandler[F](ctx, cfg, ra, item)
|
||||||
ctx.logger.info(s"Converting file ${ra.name} (${mime.asString}) into a PDF") *>
|
ctx.logger
|
||||||
|
.info(s"Converting file ${ra.name} (${mime.asString}) into a PDF") *>
|
||||||
conv.toPDF(DataType(mt), ctx.args.meta.language, handler)(
|
conv.toPDF(DataType(mt), ctx.args.meta.language, handler)(
|
||||||
data
|
data
|
||||||
)
|
)
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
)
|
||||||
|
|
||||||
|
private def loadCollectivePasswords[F[_]: Async](
|
||||||
|
ctx: Context[F, ProcessItemArgs]
|
||||||
|
): F[List[Password]] =
|
||||||
|
ctx.store
|
||||||
|
.transact(RCollectivePassword.findAll(ctx.args.meta.collective))
|
||||||
|
.map(_.map(_.password).distinct)
|
||||||
|
|
||||||
private def conversionHandler[F[_]: Sync](
|
private def conversionHandler[F[_]: Sync](
|
||||||
ctx: Context[F, ProcessItemArgs],
|
ctx: Context[F, ProcessItemArgs],
|
||||||
|
@ -2,7 +2,7 @@ openapi: 3.0.0
|
|||||||
|
|
||||||
info:
|
info:
|
||||||
title: Docspell JOEX
|
title: Docspell JOEX
|
||||||
version: 0.27.0
|
version: 0.28.0-SNAPSHOT
|
||||||
description: |
|
description: |
|
||||||
This is the remote API to the job executor component of Docspell.
|
This is the remote API to the job executor component of Docspell.
|
||||||
Docspell is a free document management system focused on small
|
Docspell is a free document management system focused on small
|
||||||
|
@ -6,8 +6,6 @@
|
|||||||
|
|
||||||
package docspell.joexapi.client
|
package docspell.joexapi.client
|
||||||
|
|
||||||
import scala.concurrent.ExecutionContext
|
|
||||||
|
|
||||||
import cats.effect._
|
import cats.effect._
|
||||||
import cats.implicits._
|
import cats.implicits._
|
||||||
|
|
||||||
@ -69,6 +67,6 @@ object JoexClient {
|
|||||||
Uri.unsafeFromString(u.asString)
|
Uri.unsafeFromString(u.asString)
|
||||||
}
|
}
|
||||||
|
|
||||||
def resource[F[_]: Async](ec: ExecutionContext): Resource[F, JoexClient[F]] =
|
def resource[F[_]: Async]: Resource[F, JoexClient[F]] =
|
||||||
BlazeClientBuilder[F](ec).resource.map(apply[F])
|
BlazeClientBuilder[F].resource.map(apply[F])
|
||||||
}
|
}
|
||||||
|
@ -123,9 +123,11 @@ object ItemQuery {
|
|||||||
final case class ChecksumMatch(checksum: String) extends Expr
|
final case class ChecksumMatch(checksum: String) extends Expr
|
||||||
final case class AttachId(id: String) extends Expr
|
final case class AttachId(id: String) extends Expr
|
||||||
|
|
||||||
final case object ValidItemStates extends Expr
|
/** A "private" expression is only visible in code, but cannot be parsed. */
|
||||||
final case object Trashed extends Expr
|
sealed trait PrivateExpr extends Expr
|
||||||
final case object ValidItemsOrTrashed extends Expr
|
final case object ValidItemStates extends PrivateExpr
|
||||||
|
final case object Trashed extends PrivateExpr
|
||||||
|
final case object ValidItemsOrTrashed extends PrivateExpr
|
||||||
|
|
||||||
// things that can be expressed with terms above
|
// things that can be expressed with terms above
|
||||||
sealed trait MacroExpr extends Expr {
|
sealed trait MacroExpr extends Expr {
|
||||||
@ -186,6 +188,10 @@ object ItemQuery {
|
|||||||
|
|
||||||
def date(op: Operator, attr: DateAttr, value: Date): SimpleExpr =
|
def date(op: Operator, attr: DateAttr, value: Date): SimpleExpr =
|
||||||
SimpleExpr(op, Property(attr, value))
|
SimpleExpr(op, Property(attr, value))
|
||||||
|
|
||||||
|
def itemIdEq(itemId1: String, moreIds: String*): Expr =
|
||||||
|
if (moreIds.isEmpty) string(Operator.Eq, Attr.ItemId, itemId1)
|
||||||
|
else InExpr(Attr.ItemId, Nel(itemId1, moreIds.toList))
|
||||||
}
|
}
|
||||||
|
|
||||||
}
|
}
|
||||||
|
@ -8,12 +8,23 @@ package docspell.query
|
|||||||
|
|
||||||
import cats.data.NonEmptyList
|
import cats.data.NonEmptyList
|
||||||
|
|
||||||
import docspell.query.internal.ExprParser
|
import docspell.query.internal.{ExprParser, ExprString, ExprUtil}
|
||||||
import docspell.query.internal.ExprUtil
|
|
||||||
|
|
||||||
object ItemQueryParser {
|
object ItemQueryParser {
|
||||||
|
|
||||||
|
val PrivateExprError = ExprString.PrivateExprError
|
||||||
|
type PrivateExprError = ExprString.PrivateExprError
|
||||||
|
|
||||||
def parse(input: String): Either[ParseFailure, ItemQuery] =
|
def parse(input: String): Either[ParseFailure, ItemQuery] =
|
||||||
|
parse0(input, expandMacros = true)
|
||||||
|
|
||||||
|
def parseKeepMacros(input: String): Either[ParseFailure, ItemQuery] =
|
||||||
|
parse0(input, expandMacros = false)
|
||||||
|
|
||||||
|
private def parse0(
|
||||||
|
input: String,
|
||||||
|
expandMacros: Boolean
|
||||||
|
): Either[ParseFailure, ItemQuery] =
|
||||||
if (input.isEmpty)
|
if (input.isEmpty)
|
||||||
Left(
|
Left(
|
||||||
ParseFailure("", 0, NonEmptyList.of(ParseFailure.SimpleMessage(0, "No input.")))
|
ParseFailure("", 0, NonEmptyList.of(ParseFailure.SimpleMessage(0, "No input.")))
|
||||||
@ -24,9 +35,16 @@ object ItemQueryParser {
|
|||||||
.parseQuery(in)
|
.parseQuery(in)
|
||||||
.left
|
.left
|
||||||
.map(ParseFailure.fromError(in))
|
.map(ParseFailure.fromError(in))
|
||||||
.map(q => q.copy(expr = ExprUtil.reduce(q.expr)))
|
.map(q => q.copy(expr = ExprUtil.reduce(expandMacros)(q.expr)))
|
||||||
}
|
}
|
||||||
|
|
||||||
def parseUnsafe(input: String): ItemQuery =
|
def parseUnsafe(input: String): ItemQuery =
|
||||||
parse(input).fold(m => sys.error(m.render), identity)
|
parse(input).fold(m => sys.error(m.render), identity)
|
||||||
|
|
||||||
|
def asString(q: ItemQuery.Expr): Either[PrivateExprError, String] =
|
||||||
|
ExprString(q)
|
||||||
|
|
||||||
|
def unsafeAsString(q: ItemQuery.Expr): String =
|
||||||
|
asString(q).fold(f => sys.error(s"Cannot expose private query part: $f"), identity)
|
||||||
|
|
||||||
}
|
}
|
||||||
|
@ -24,7 +24,7 @@ object BasicParser {
|
|||||||
)
|
)
|
||||||
|
|
||||||
private[this] val identChars: Set[Char] =
|
private[this] val identChars: Set[Char] =
|
||||||
(('A' to 'Z') ++ ('a' to 'z') ++ ('0' to '9') ++ "-_.").toSet
|
(('A' to 'Z') ++ ('a' to 'z') ++ ('0' to '9') ++ "-_.@").toSet
|
||||||
|
|
||||||
val parenAnd: P[Unit] =
|
val parenAnd: P[Unit] =
|
||||||
P.stringIn(List("(&", "(and")).void <* ws0
|
P.stringIn(List("(&", "(and")).void <* ws0
|
||||||
|
@ -0,0 +1,244 @@
|
|||||||
|
/*
|
||||||
|
* Copyright 2020 Eike K. & Contributors
|
||||||
|
*
|
||||||
|
* SPDX-License-Identifier: AGPL-3.0-or-later
|
||||||
|
*/
|
||||||
|
|
||||||
|
package docspell.query.internal
|
||||||
|
|
||||||
|
import java.time.Period
|
||||||
|
|
||||||
|
import docspell.query.Date
|
||||||
|
import docspell.query.Date.DateLiteral
|
||||||
|
import docspell.query.ItemQuery.Attr._
|
||||||
|
import docspell.query.ItemQuery.Expr._
|
||||||
|
import docspell.query.ItemQuery._
|
||||||
|
import docspell.query.internal.{Constants => C}
|
||||||
|
|
||||||
|
/** Creates the string representation for a given expression. The returned string can be
|
||||||
|
* parsed back to the expression using `ExprParser`. Note that expressions obtained from
|
||||||
|
* the `ItemQueryParser` have macros already expanded.
|
||||||
|
*
|
||||||
|
* It may fail when the expression contains non-public parts. Every expression that has
|
||||||
|
* been created by parsing a string, can be transformed back to a string. But an
|
||||||
|
* expression created via code may contain parts that cannot be transformed to a string.
|
||||||
|
*/
|
||||||
|
object ExprString {
|
||||||
|
|
||||||
|
final case class PrivateExprError(expr: Expr.PrivateExpr)
|
||||||
|
type Result = Either[PrivateExprError, String]
|
||||||
|
|
||||||
|
def apply(expr: Expr): Result =
|
||||||
|
expr match {
|
||||||
|
case AndExpr(inner) =>
|
||||||
|
val es = inner.traverse(ExprString.apply)
|
||||||
|
es.map(_.toList.mkString(" ")).map(els => s"(& $els )")
|
||||||
|
|
||||||
|
case OrExpr(inner) =>
|
||||||
|
val es = inner.traverse(ExprString.apply)
|
||||||
|
es.map(_.toList.mkString(" ")).map(els => s"(| $els )")
|
||||||
|
|
||||||
|
case NotExpr(inner) =>
|
||||||
|
inner match {
|
||||||
|
case NotExpr(inner2) =>
|
||||||
|
apply(inner2)
|
||||||
|
case _ =>
|
||||||
|
apply(inner).map(n => s"!$n")
|
||||||
|
}
|
||||||
|
|
||||||
|
case m: MacroExpr =>
|
||||||
|
Right(macroStr(m))
|
||||||
|
|
||||||
|
case DirectionExpr(v) =>
|
||||||
|
Right(s"${C.incoming}${C.like}${v}")
|
||||||
|
|
||||||
|
case InboxExpr(v) =>
|
||||||
|
Right(s"${C.inbox}${C.like}${v}")
|
||||||
|
|
||||||
|
case InExpr(attr, values) =>
|
||||||
|
val els = values.map(quote).toList.mkString(",")
|
||||||
|
Right(s"${attrStr(attr)}${C.in}$els")
|
||||||
|
|
||||||
|
case InDateExpr(attr, values) =>
|
||||||
|
val els = values.map(dateStr).toList.mkString(",")
|
||||||
|
Right(s"${attrStr(attr)}${C.in}$els")
|
||||||
|
|
||||||
|
case TagsMatch(op, values) =>
|
||||||
|
val els = values.map(quote).toList.mkString(",")
|
||||||
|
Right(s"${C.tag}${tagOpStr(op)}$els")
|
||||||
|
|
||||||
|
case TagIdsMatch(op, values) =>
|
||||||
|
val els = values.map(quote).toList.mkString(",")
|
||||||
|
Right(s"${C.tagId}${tagOpStr(op)}$els")
|
||||||
|
|
||||||
|
case Exists(field) =>
|
||||||
|
Right(s"${C.exist}${C.like}${attrStr(field)}")
|
||||||
|
|
||||||
|
case Fulltext(v) =>
|
||||||
|
Right(s"${C.content}${C.like}${quote(v)}")
|
||||||
|
|
||||||
|
case SimpleExpr(op, prop) =>
|
||||||
|
prop match {
|
||||||
|
case Property.StringProperty(attr, value) =>
|
||||||
|
Right(s"${stringAttr(attr)}${opStr(op)}${quote(value)}")
|
||||||
|
case Property.DateProperty(attr, value) =>
|
||||||
|
Right(s"${dateAttr(attr)}${opStr(op)}${dateStr(value)}")
|
||||||
|
case Property.IntProperty(attr, value) =>
|
||||||
|
Right(s"${attrStr(attr)}${opStr(op)}$value")
|
||||||
|
}
|
||||||
|
|
||||||
|
case TagCategoryMatch(op, values) =>
|
||||||
|
val els = values.map(quote).toList.mkString(",")
|
||||||
|
Right(s"${C.cat}${tagOpStr(op)}$els")
|
||||||
|
|
||||||
|
case CustomFieldMatch(name, op, value) =>
|
||||||
|
Right(s"${C.customField}:$name${opStr(op)}${quote(value)}")
|
||||||
|
|
||||||
|
case CustomFieldIdMatch(id, op, value) =>
|
||||||
|
Right(s"${C.customFieldId}:$id${opStr(op)}${quote(value)}")
|
||||||
|
|
||||||
|
case ChecksumMatch(cs) =>
|
||||||
|
Right(s"${C.checksum}${C.like}$cs")
|
||||||
|
|
||||||
|
case AttachId(aid) =>
|
||||||
|
Right(s"${C.attachId}${C.eqs}$aid")
|
||||||
|
|
||||||
|
case pe: PrivateExpr =>
|
||||||
|
// There is no parser equivalent for this
|
||||||
|
Left(PrivateExprError(pe))
|
||||||
|
}
|
||||||
|
|
||||||
|
private[internal] def macroStr(expr: Expr.MacroExpr): String =
|
||||||
|
expr match {
|
||||||
|
case Expr.NamesMacro(name) =>
|
||||||
|
s"${C.names}:${quote(name)}"
|
||||||
|
case Expr.YearMacro(_, year) =>
|
||||||
|
s"${C.year}:$year" //currently, only for Attr.Date
|
||||||
|
case Expr.ConcMacro(term) =>
|
||||||
|
s"${C.conc}:${quote(term)}"
|
||||||
|
case Expr.CorrMacro(term) =>
|
||||||
|
s"${C.corr}:${quote(term)}"
|
||||||
|
case Expr.DateRangeMacro(attr, left, right) =>
|
||||||
|
val name = attr match {
|
||||||
|
case Attr.CreatedDate =>
|
||||||
|
C.createdIn
|
||||||
|
case Attr.Date =>
|
||||||
|
C.dateIn
|
||||||
|
case Attr.DueDate =>
|
||||||
|
C.dueIn
|
||||||
|
}
|
||||||
|
(left, right) match {
|
||||||
|
case (_: Date.DateLiteral, Date.Calc(date, calc, period)) =>
|
||||||
|
s"$name:${dateStr(date)};${calcStr(calc)}${periodStr(period)}"
|
||||||
|
|
||||||
|
case (Date.Calc(date, calc, period), _: DateLiteral) =>
|
||||||
|
s"$name:${dateStr(date)};${calcStr(calc)}${periodStr(period)}"
|
||||||
|
|
||||||
|
case (Date.Calc(d1, _, p1), Date.Calc(_, _, _)) =>
|
||||||
|
s"$name:${dateStr(d1)};/${periodStr(p1)}"
|
||||||
|
|
||||||
|
case (_: DateLiteral, _: DateLiteral) =>
|
||||||
|
sys.error("Invalid date range")
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
private[internal] def dateStr(date: Date): String =
|
||||||
|
date match {
|
||||||
|
case Date.Today =>
|
||||||
|
"today"
|
||||||
|
case Date.Local(ld) =>
|
||||||
|
f"${ld.getYear}-${ld.getMonthValue}%02d-${ld.getDayOfMonth}%02d"
|
||||||
|
|
||||||
|
case Date.Millis(ms) =>
|
||||||
|
s"ms$ms"
|
||||||
|
|
||||||
|
case Date.Calc(date, calc, period) =>
|
||||||
|
val ds = dateStr(date)
|
||||||
|
s"$ds;${calcStr(calc)}${periodStr(period)}"
|
||||||
|
}
|
||||||
|
|
||||||
|
private[internal] def calcStr(c: Date.CalcDirection): String =
|
||||||
|
c match {
|
||||||
|
case Date.CalcDirection.Plus => "+"
|
||||||
|
case Date.CalcDirection.Minus => "-"
|
||||||
|
}
|
||||||
|
|
||||||
|
private[internal] def periodStr(p: Period): String =
|
||||||
|
if (p.toTotalMonths == 0) s"${p.getDays}d"
|
||||||
|
else s"${p.toTotalMonths}m"
|
||||||
|
|
||||||
|
private[internal] def attrStr(attr: Attr): String =
|
||||||
|
attr match {
|
||||||
|
case a: StringAttr => stringAttr(a)
|
||||||
|
case a: DateAttr => dateAttr(a)
|
||||||
|
case a: IntAttr => intAttr(a)
|
||||||
|
}
|
||||||
|
|
||||||
|
private[internal] def intAttr(attr: IntAttr): String =
|
||||||
|
attr match {
|
||||||
|
case AttachCount =>
|
||||||
|
Constants.attachCount
|
||||||
|
}
|
||||||
|
|
||||||
|
private[internal] def dateAttr(attr: DateAttr): String =
|
||||||
|
attr match {
|
||||||
|
case Attr.Date =>
|
||||||
|
Constants.date
|
||||||
|
case DueDate =>
|
||||||
|
Constants.due
|
||||||
|
case CreatedDate =>
|
||||||
|
Constants.created
|
||||||
|
}
|
||||||
|
|
||||||
|
private[internal] def stringAttr(attr: StringAttr): String =
|
||||||
|
attr match {
|
||||||
|
case Attr.ItemName =>
|
||||||
|
Constants.name
|
||||||
|
case Attr.ItemId =>
|
||||||
|
Constants.id
|
||||||
|
case Attr.ItemSource =>
|
||||||
|
Constants.source
|
||||||
|
case Attr.ItemNotes =>
|
||||||
|
Constants.notes
|
||||||
|
case Correspondent.OrgId =>
|
||||||
|
Constants.corrOrgId
|
||||||
|
case Correspondent.OrgName =>
|
||||||
|
Constants.corrOrgName
|
||||||
|
case Correspondent.PersonId =>
|
||||||
|
Constants.corrPersId
|
||||||
|
case Correspondent.PersonName =>
|
||||||
|
Constants.corrPersName
|
||||||
|
case Concerning.EquipId =>
|
||||||
|
Constants.concEquipId
|
||||||
|
case Concerning.EquipName =>
|
||||||
|
Constants.concEquipName
|
||||||
|
case Concerning.PersonId =>
|
||||||
|
Constants.concPersId
|
||||||
|
case Concerning.PersonName =>
|
||||||
|
Constants.concPersName
|
||||||
|
case Folder.FolderName =>
|
||||||
|
Constants.folder
|
||||||
|
case Folder.FolderId =>
|
||||||
|
Constants.folderId
|
||||||
|
}
|
||||||
|
|
||||||
|
private[internal] def opStr(op: Operator): String =
|
||||||
|
op match {
|
||||||
|
case Operator.Like => Constants.like.toString
|
||||||
|
case Operator.Gte => Constants.gte
|
||||||
|
case Operator.Lte => Constants.lte
|
||||||
|
case Operator.Eq => Constants.eqs.toString
|
||||||
|
case Operator.Lt => Constants.lt.toString
|
||||||
|
case Operator.Gt => Constants.gt.toString
|
||||||
|
case Operator.Neq => Constants.neq
|
||||||
|
}
|
||||||
|
|
||||||
|
private[internal] def tagOpStr(op: TagOperator): String =
|
||||||
|
op match {
|
||||||
|
case TagOperator.AllMatch => C.eqs.toString
|
||||||
|
case TagOperator.AnyMatch => C.like.toString
|
||||||
|
}
|
||||||
|
|
||||||
|
private def quote(s: String): String =
|
||||||
|
s"\"$s\""
|
||||||
|
}
|
@ -13,35 +13,42 @@ import docspell.query.ItemQuery._
|
|||||||
|
|
||||||
object ExprUtil {
|
object ExprUtil {
|
||||||
|
|
||||||
|
def reduce(expr: Expr): Expr =
|
||||||
|
reduce(expandMacros = true)(expr)
|
||||||
|
|
||||||
/** Does some basic transformation, like unfolding nested and trees containing one value
|
/** Does some basic transformation, like unfolding nested and trees containing one value
|
||||||
* etc.
|
* etc.
|
||||||
*/
|
*/
|
||||||
def reduce(expr: Expr): Expr =
|
def reduce(expandMacros: Boolean)(expr: Expr): Expr =
|
||||||
expr match {
|
expr match {
|
||||||
case AndExpr(inner) =>
|
case AndExpr(inner) =>
|
||||||
val nodes = spliceAnd(inner)
|
val nodes = spliceAnd(inner)
|
||||||
if (nodes.tail.isEmpty) reduce(nodes.head)
|
if (nodes.tail.isEmpty) reduce(expandMacros)(nodes.head)
|
||||||
else AndExpr(nodes.map(reduce))
|
else AndExpr(nodes.map(reduce(expandMacros)))
|
||||||
|
|
||||||
case OrExpr(inner) =>
|
case OrExpr(inner) =>
|
||||||
val nodes = spliceOr(inner)
|
val nodes = spliceOr(inner)
|
||||||
if (nodes.tail.isEmpty) reduce(nodes.head)
|
if (nodes.tail.isEmpty) reduce(expandMacros)(nodes.head)
|
||||||
else OrExpr(nodes.map(reduce))
|
else OrExpr(nodes.map(reduce(expandMacros)))
|
||||||
|
|
||||||
case NotExpr(inner) =>
|
case NotExpr(inner) =>
|
||||||
inner match {
|
inner match {
|
||||||
case NotExpr(inner2) =>
|
case NotExpr(inner2) =>
|
||||||
reduce(inner2)
|
reduce(expandMacros)(inner2)
|
||||||
case InboxExpr(flag) =>
|
case InboxExpr(flag) =>
|
||||||
InboxExpr(!flag)
|
InboxExpr(!flag)
|
||||||
case DirectionExpr(flag) =>
|
case DirectionExpr(flag) =>
|
||||||
DirectionExpr(!flag)
|
DirectionExpr(!flag)
|
||||||
case _ =>
|
case _ =>
|
||||||
NotExpr(reduce(inner))
|
NotExpr(reduce(expandMacros)(inner))
|
||||||
}
|
}
|
||||||
|
|
||||||
case m: MacroExpr =>
|
case m: MacroExpr =>
|
||||||
reduce(m.body)
|
if (expandMacros) {
|
||||||
|
reduce(expandMacros)(m.body)
|
||||||
|
} else {
|
||||||
|
m
|
||||||
|
}
|
||||||
|
|
||||||
case DirectionExpr(_) =>
|
case DirectionExpr(_) =>
|
||||||
expr
|
expr
|
||||||
|
@ -0,0 +1,287 @@
|
|||||||
|
/*
|
||||||
|
* Copyright 2020 Eike K. & Contributors
|
||||||
|
*
|
||||||
|
* SPDX-License-Identifier: AGPL-3.0-or-later
|
||||||
|
*/
|
||||||
|
|
||||||
|
package docspell.query
|
||||||
|
|
||||||
|
import java.time.{Instant, Period, ZoneOffset}
|
||||||
|
|
||||||
|
import cats.data.NonEmptyList
|
||||||
|
|
||||||
|
import docspell.query.ItemQuery.Expr.TagIdsMatch
|
||||||
|
import docspell.query.ItemQuery._
|
||||||
|
|
||||||
|
import org.scalacheck.Gen
|
||||||
|
|
||||||
|
/** Generator for syntactically valid queries. */
|
||||||
|
object ItemQueryGen {
|
||||||
|
|
||||||
|
def exprGen: Gen[Expr] =
|
||||||
|
Gen.oneOf(
|
||||||
|
simpleExprGen,
|
||||||
|
existsExprGen,
|
||||||
|
inExprGen,
|
||||||
|
inDateExprGen,
|
||||||
|
inboxExprGen,
|
||||||
|
directionExprGen,
|
||||||
|
tagIdsMatchExprGen,
|
||||||
|
tagMatchExprGen,
|
||||||
|
tagCatMatchExpr,
|
||||||
|
customFieldMatchExprGen,
|
||||||
|
customFieldIdMatchExprGen,
|
||||||
|
fulltextExprGen,
|
||||||
|
checksumMatchExprGen,
|
||||||
|
attachIdExprGen,
|
||||||
|
namesMacroGen,
|
||||||
|
corrMacroGen,
|
||||||
|
concMacroGen,
|
||||||
|
yearMacroGen,
|
||||||
|
dateRangeMacro,
|
||||||
|
Gen.lzy(andExprGen(exprGen)),
|
||||||
|
Gen.lzy(orExprGen(exprGen)),
|
||||||
|
Gen.lzy(notExprGen(exprGen))
|
||||||
|
)
|
||||||
|
|
||||||
|
def andExprGen(g: Gen[Expr]): Gen[Expr.AndExpr] =
|
||||||
|
nelGen(g).map(Expr.AndExpr)
|
||||||
|
|
||||||
|
def orExprGen(g: Gen[Expr]): Gen[Expr.OrExpr] =
|
||||||
|
nelGen(g).map(Expr.OrExpr)
|
||||||
|
|
||||||
|
// avoid generating nested not expressions, they are already flattened by the parser
|
||||||
|
// and only occur artificially
|
||||||
|
def notExprGen(g: Gen[Expr]): Gen[Expr] =
|
||||||
|
g.map {
|
||||||
|
case Expr.NotExpr(inner) => inner
|
||||||
|
case e => Expr.NotExpr(e)
|
||||||
|
}
|
||||||
|
|
||||||
|
val opGen: Gen[Operator] =
|
||||||
|
Gen.oneOf(
|
||||||
|
Operator.Like,
|
||||||
|
Operator.Gte,
|
||||||
|
Operator.Lt,
|
||||||
|
Operator.Gt,
|
||||||
|
Operator.Lte,
|
||||||
|
Operator.Eq,
|
||||||
|
Operator.Neq
|
||||||
|
)
|
||||||
|
|
||||||
|
val tagOpGen: Gen[TagOperator] =
|
||||||
|
Gen.oneOf(TagOperator.AllMatch, TagOperator.AnyMatch)
|
||||||
|
|
||||||
|
val stringAttrGen: Gen[Attr.StringAttr] =
|
||||||
|
Gen.oneOf(
|
||||||
|
Attr.Concerning.EquipName,
|
||||||
|
Attr.Concerning.EquipId,
|
||||||
|
Attr.Concerning.PersonName,
|
||||||
|
Attr.Concerning.PersonId,
|
||||||
|
Attr.Correspondent.OrgName,
|
||||||
|
Attr.Correspondent.OrgId,
|
||||||
|
Attr.Correspondent.PersonName,
|
||||||
|
Attr.Correspondent.PersonId,
|
||||||
|
Attr.ItemId,
|
||||||
|
Attr.ItemName,
|
||||||
|
Attr.ItemSource,
|
||||||
|
Attr.ItemNotes,
|
||||||
|
Attr.Folder.FolderId,
|
||||||
|
Attr.Folder.FolderName
|
||||||
|
)
|
||||||
|
|
||||||
|
val dateAttrGen: Gen[Attr.DateAttr] =
|
||||||
|
Gen.oneOf(Attr.Date, Attr.DueDate, Attr.CreatedDate)
|
||||||
|
|
||||||
|
val intAttrGen: Gen[Attr.IntAttr] =
|
||||||
|
Gen.const(Attr.AttachCount)
|
||||||
|
|
||||||
|
val attrGen: Gen[Attr] =
|
||||||
|
Gen.oneOf(stringAttrGen, dateAttrGen, intAttrGen)
|
||||||
|
|
||||||
|
private val valueChars =
|
||||||
|
Gen.oneOf(Gen.alphaNumChar, Gen.oneOf(" /{}*?-:@#$~+%…_[]^!ß"))
|
||||||
|
|
||||||
|
private val stringValueGen: Gen[String] =
|
||||||
|
Gen.choose(1, 20).flatMap(n => Gen.stringOfN(n, valueChars))
|
||||||
|
|
||||||
|
private val intValueGen: Gen[Int] =
|
||||||
|
Gen.choose(1900, 9999)
|
||||||
|
|
||||||
|
private val identGen: Gen[String] =
|
||||||
|
Gen
|
||||||
|
.choose(3, 12)
|
||||||
|
.flatMap(n =>
|
||||||
|
Gen.stringOfN(
|
||||||
|
n,
|
||||||
|
Gen.oneOf((('A' to 'Z') ++ ('a' to 'z') ++ ('0' to '9') ++ "-_.@").toSet)
|
||||||
|
)
|
||||||
|
)
|
||||||
|
|
||||||
|
private def nelGen[T](gen: Gen[T]): Gen[NonEmptyList[T]] =
|
||||||
|
for {
|
||||||
|
head <- gen
|
||||||
|
tail <- Gen.choose(0, 9).flatMap(n => Gen.listOfN(n, gen))
|
||||||
|
} yield NonEmptyList(head, tail)
|
||||||
|
|
||||||
|
private val dateMillisGen: Gen[Long] =
|
||||||
|
Gen.choose(0, Instant.parse("2100-12-24T20:00:00Z").toEpochMilli)
|
||||||
|
|
||||||
|
val localDateGen: Gen[Date.Local] =
|
||||||
|
dateMillisGen
|
||||||
|
.map(ms => Instant.ofEpochMilli(ms).atOffset(ZoneOffset.UTC).toLocalDate)
|
||||||
|
.map(Date.Local)
|
||||||
|
|
||||||
|
val millisDateGen: Gen[Date.Millis] =
|
||||||
|
dateMillisGen.map(Date.Millis)
|
||||||
|
|
||||||
|
val dateLiteralGen: Gen[Date.DateLiteral] =
|
||||||
|
Gen.oneOf(
|
||||||
|
localDateGen,
|
||||||
|
millisDateGen,
|
||||||
|
Gen.const(Date.Today)
|
||||||
|
)
|
||||||
|
|
||||||
|
val periodGen: Gen[Period] =
|
||||||
|
for {
|
||||||
|
mOrD <- Gen.oneOf(a => Period.ofDays(a), a => Period.ofMonths(a))
|
||||||
|
num <- Gen.choose(1, 30)
|
||||||
|
} yield mOrD(num)
|
||||||
|
|
||||||
|
val calcGen: Gen[Date.CalcDirection] =
|
||||||
|
Gen.oneOf(Date.CalcDirection.Plus, Date.CalcDirection.Minus)
|
||||||
|
|
||||||
|
val dateCalcGen: Gen[Date.Calc] =
|
||||||
|
for {
|
||||||
|
dl <- dateLiteralGen
|
||||||
|
calc <- calcGen
|
||||||
|
period <- periodGen
|
||||||
|
} yield Date.Calc(dl, calc, period)
|
||||||
|
|
||||||
|
val dateValueGen: Gen[Date] =
|
||||||
|
Gen.oneOf(dateLiteralGen, dateCalcGen)
|
||||||
|
|
||||||
|
val stringPropGen: Gen[Property.StringProperty] =
|
||||||
|
for {
|
||||||
|
attr <- stringAttrGen
|
||||||
|
sval <- stringValueGen
|
||||||
|
} yield Property.StringProperty(attr, sval)
|
||||||
|
|
||||||
|
val intPropGen: Gen[Property.IntProperty] =
|
||||||
|
for {
|
||||||
|
attr <- intAttrGen
|
||||||
|
ival <- intValueGen
|
||||||
|
} yield Property.IntProperty(attr, ival)
|
||||||
|
|
||||||
|
val datePropGen: Gen[Property.DateProperty] =
|
||||||
|
for {
|
||||||
|
attr <- dateAttrGen
|
||||||
|
dv <- dateValueGen
|
||||||
|
} yield Property.DateProperty(attr, dv)
|
||||||
|
|
||||||
|
val propertyGen: Gen[Property] =
|
||||||
|
Gen.oneOf(stringPropGen, datePropGen, intPropGen)
|
||||||
|
|
||||||
|
val simpleExprGen: Gen[Expr.SimpleExpr] =
|
||||||
|
for {
|
||||||
|
op <- opGen
|
||||||
|
prop <- propertyGen
|
||||||
|
} yield Expr.SimpleExpr(op, prop)
|
||||||
|
|
||||||
|
val existsExprGen: Gen[Expr.Exists] =
|
||||||
|
attrGen.map(Expr.Exists)
|
||||||
|
|
||||||
|
val inExprGen: Gen[Expr.InExpr] =
|
||||||
|
for {
|
||||||
|
attr <- stringAttrGen
|
||||||
|
vals <- nelGen(stringValueGen)
|
||||||
|
} yield Expr.InExpr(attr, vals)
|
||||||
|
|
||||||
|
val inDateExprGen: Gen[Expr.InDateExpr] =
|
||||||
|
for {
|
||||||
|
attr <- dateAttrGen
|
||||||
|
vals <- nelGen(dateValueGen)
|
||||||
|
} yield Expr.InDateExpr(attr, vals)
|
||||||
|
|
||||||
|
val inboxExprGen: Gen[Expr.InboxExpr] =
|
||||||
|
Gen.oneOf(true, false).map(Expr.InboxExpr)
|
||||||
|
|
||||||
|
val directionExprGen: Gen[Expr.DirectionExpr] =
|
||||||
|
Gen.oneOf(true, false).map(Expr.DirectionExpr)
|
||||||
|
|
||||||
|
val tagIdsMatchExprGen: Gen[Expr.TagIdsMatch] =
|
||||||
|
for {
|
||||||
|
op <- tagOpGen
|
||||||
|
vals <- nelGen(stringValueGen)
|
||||||
|
} yield TagIdsMatch(op, vals)
|
||||||
|
|
||||||
|
val tagMatchExprGen: Gen[Expr.TagsMatch] =
|
||||||
|
for {
|
||||||
|
op <- tagOpGen
|
||||||
|
vals <- nelGen(stringValueGen)
|
||||||
|
} yield Expr.TagsMatch(op, vals)
|
||||||
|
|
||||||
|
val tagCatMatchExpr: Gen[Expr.TagCategoryMatch] =
|
||||||
|
for {
|
||||||
|
op <- tagOpGen
|
||||||
|
vals <- nelGen(stringValueGen)
|
||||||
|
} yield Expr.TagCategoryMatch(op, vals)
|
||||||
|
|
||||||
|
val customFieldMatchExprGen: Gen[Expr.CustomFieldMatch] =
|
||||||
|
for {
|
||||||
|
name <- identGen
|
||||||
|
op <- opGen
|
||||||
|
value <- stringValueGen
|
||||||
|
} yield Expr.CustomFieldMatch(name, op, value)
|
||||||
|
|
||||||
|
val customFieldIdMatchExprGen: Gen[Expr.CustomFieldIdMatch] =
|
||||||
|
for {
|
||||||
|
name <- identGen
|
||||||
|
op <- opGen
|
||||||
|
value <- identGen
|
||||||
|
} yield Expr.CustomFieldIdMatch(name, op, value)
|
||||||
|
|
||||||
|
val fulltextExprGen: Gen[Expr.Fulltext] =
|
||||||
|
Gen
|
||||||
|
.choose(3, 20)
|
||||||
|
.flatMap(n => Gen.stringOfN(n, valueChars))
|
||||||
|
.map(Expr.Fulltext)
|
||||||
|
|
||||||
|
val checksumMatchExprGen: Gen[Expr.ChecksumMatch] =
|
||||||
|
Gen.stringOfN(64, Gen.hexChar).map(Expr.ChecksumMatch)
|
||||||
|
|
||||||
|
val attachIdExprGen: Gen[Expr.AttachId] =
|
||||||
|
identGen.map(Expr.AttachId)
|
||||||
|
|
||||||
|
val namesMacroGen: Gen[Expr.NamesMacro] =
|
||||||
|
stringValueGen.map(Expr.NamesMacro)
|
||||||
|
|
||||||
|
val concMacroGen: Gen[Expr.ConcMacro] =
|
||||||
|
stringValueGen.map(Expr.ConcMacro)
|
||||||
|
|
||||||
|
val corrMacroGen: Gen[Expr.CorrMacro] =
|
||||||
|
stringValueGen.map(Expr.CorrMacro)
|
||||||
|
|
||||||
|
val yearMacroGen: Gen[Expr.YearMacro] =
|
||||||
|
Gen.choose(1900, 9999).map(Expr.YearMacro(Attr.Date, _))
|
||||||
|
|
||||||
|
val dateRangeMacro: Gen[Expr.DateRangeMacro] =
|
||||||
|
for {
|
||||||
|
attr <- dateAttrGen
|
||||||
|
dl <- dateLiteralGen
|
||||||
|
p <- periodGen
|
||||||
|
calc <- Gen.option(calcGen)
|
||||||
|
range = calc match {
|
||||||
|
case Some(c @ Date.CalcDirection.Plus) =>
|
||||||
|
Expr.DateRangeMacro(attr, dl, Date.Calc(dl, c, p))
|
||||||
|
case Some(c @ Date.CalcDirection.Minus) =>
|
||||||
|
Expr.DateRangeMacro(attr, Date.Calc(dl, c, p), dl)
|
||||||
|
case None =>
|
||||||
|
Expr.DateRangeMacro(
|
||||||
|
attr,
|
||||||
|
Date.Calc(dl, Date.CalcDirection.Minus, p),
|
||||||
|
Date.Calc(dl, Date.CalcDirection.Plus, p)
|
||||||
|
)
|
||||||
|
}
|
||||||
|
} yield range
|
||||||
|
}
|
@ -0,0 +1,69 @@
|
|||||||
|
/*
|
||||||
|
* Copyright 2020 Eike K. & Contributors
|
||||||
|
*
|
||||||
|
* SPDX-License-Identifier: AGPL-3.0-or-later
|
||||||
|
*/
|
||||||
|
|
||||||
|
package docspell.query.internal
|
||||||
|
|
||||||
|
import java.time.{LocalDate, Period}
|
||||||
|
|
||||||
|
import docspell.query.ItemQuery._
|
||||||
|
import docspell.query.{Date, ItemQueryGen, ParseFailure}
|
||||||
|
|
||||||
|
import munit.{FunSuite, ScalaCheckSuite}
|
||||||
|
import org.scalacheck.Prop.forAll
|
||||||
|
|
||||||
|
class ExprStringTest extends FunSuite with ScalaCheckSuite {
|
||||||
|
|
||||||
|
// parses the query without reducing and expanding macros
|
||||||
|
def singleParse(s: String): Expr =
|
||||||
|
ExprParser
|
||||||
|
.parseQuery(s)
|
||||||
|
.left
|
||||||
|
.map(ParseFailure.fromError(s))
|
||||||
|
.fold(f => sys.error(f.render), _.expr)
|
||||||
|
|
||||||
|
def exprString(expr: Expr): String =
|
||||||
|
ExprString(expr).fold(f => sys.error(f.toString), identity)
|
||||||
|
|
||||||
|
test("macro: name") {
|
||||||
|
val str = exprString(Expr.NamesMacro("test"))
|
||||||
|
val q = singleParse(str)
|
||||||
|
assertEquals(str, "names:\"test\"")
|
||||||
|
assertEquals(q, Expr.NamesMacro("test"))
|
||||||
|
}
|
||||||
|
|
||||||
|
test("macro: year") {
|
||||||
|
val str = exprString(Expr.YearMacro(Attr.Date, 1990))
|
||||||
|
val q = singleParse(str)
|
||||||
|
assertEquals(str, "year:1990")
|
||||||
|
assertEquals(q, Expr.YearMacro(Attr.Date, 1990))
|
||||||
|
}
|
||||||
|
|
||||||
|
test("macro: daterange") {
|
||||||
|
val range = Expr.DateRangeMacro(
|
||||||
|
attr = Attr.Date,
|
||||||
|
left = Date.Calc(
|
||||||
|
date = Date.Local(
|
||||||
|
date = LocalDate.of(2076, 12, 9)
|
||||||
|
),
|
||||||
|
calc = Date.CalcDirection.Minus,
|
||||||
|
period = Period.ofMonths(27)
|
||||||
|
),
|
||||||
|
right = Date.Local(LocalDate.of(2076, 12, 9))
|
||||||
|
)
|
||||||
|
val str = exprString(range)
|
||||||
|
val q = singleParse(str)
|
||||||
|
assertEquals(str, "dateIn:2076-12-09;-27m")
|
||||||
|
assertEquals(q, range)
|
||||||
|
}
|
||||||
|
|
||||||
|
property("generate expr and parse it") {
|
||||||
|
forAll(ItemQueryGen.exprGen) { expr =>
|
||||||
|
val str = exprString(expr)
|
||||||
|
val q = singleParse(str)
|
||||||
|
assertEquals(q, expr)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
@ -8,7 +8,7 @@ package docspell.query.internal
|
|||||||
|
|
||||||
import cats.implicits._
|
import cats.implicits._
|
||||||
|
|
||||||
import docspell.query.ItemQueryParser
|
import docspell.query.{ItemQuery, ItemQueryParser}
|
||||||
|
|
||||||
import munit._
|
import munit._
|
||||||
|
|
||||||
@ -64,4 +64,14 @@ class ItemQueryParserTest extends FunSuite {
|
|||||||
ItemQueryParser.parseUnsafe("(| name:hello date:2021-02 name:world name:hello )")
|
ItemQueryParser.parseUnsafe("(| name:hello date:2021-02 name:world name:hello )")
|
||||||
assertEquals(expect.copy(raw = raw.some), q)
|
assertEquals(expect.copy(raw = raw.some), q)
|
||||||
}
|
}
|
||||||
|
|
||||||
|
test("f.id:name=value") {
|
||||||
|
val raw = "f.id:QsuGW@=\"dAHBstXJd0\""
|
||||||
|
val q = ItemQueryParser.parseUnsafe(raw)
|
||||||
|
val expect =
|
||||||
|
ItemQuery.Expr.CustomFieldIdMatch("QsuGW@", ItemQuery.Operator.Eq, "dAHBstXJd0")
|
||||||
|
|
||||||
|
assertEquals(q.expr, expect)
|
||||||
|
|
||||||
|
}
|
||||||
}
|
}
|
||||||
|
@ -2,7 +2,7 @@ openapi: 3.0.0
|
|||||||
|
|
||||||
info:
|
info:
|
||||||
title: Docspell
|
title: Docspell
|
||||||
version: 0.27.0
|
version: 0.28.0-SNAPSHOT
|
||||||
description: |
|
description: |
|
||||||
This is the remote API to Docspell. Docspell is a free document
|
This is the remote API to Docspell. Docspell is a free document
|
||||||
management system focused on small groups or families.
|
management system focused on small groups or families.
|
||||||
@ -538,6 +538,37 @@ paths:
|
|||||||
application/json:
|
application/json:
|
||||||
schema:
|
schema:
|
||||||
$ref: "#/components/schemas/InviteResult"
|
$ref: "#/components/schemas/InviteResult"
|
||||||
|
|
||||||
|
/open/share/verify:
|
||||||
|
post:
|
||||||
|
operationId: "open-share-verify"
|
||||||
|
tags: [ Share ]
|
||||||
|
summary: Verify a secret for a share
|
||||||
|
description: |
|
||||||
|
Given the share id and optionally a password, it verifies the
|
||||||
|
correctness of the given data. As a result, a token is
|
||||||
|
returned that must be used with all `share/*` routes. If the
|
||||||
|
password is missing, but required, the response indicates
|
||||||
|
this. Then the requests needs to be replayed with the correct
|
||||||
|
password to retrieve the token.
|
||||||
|
|
||||||
|
The token is also added as a session cookie to the response.
|
||||||
|
|
||||||
|
The token is used to avoid passing the user define password
|
||||||
|
with every request.
|
||||||
|
requestBody:
|
||||||
|
content:
|
||||||
|
application/json:
|
||||||
|
schema:
|
||||||
|
$ref: "#/components/schemas/ShareSecret"
|
||||||
|
responses:
|
||||||
|
200:
|
||||||
|
description: Ok
|
||||||
|
content:
|
||||||
|
application/json:
|
||||||
|
schema:
|
||||||
|
$ref: "#/components/schemas/ShareVerifyResult"
|
||||||
|
|
||||||
/sec/auth/session:
|
/sec/auth/session:
|
||||||
post:
|
post:
|
||||||
operationId: "sec-auth-session"
|
operationId: "sec-auth-session"
|
||||||
@ -1527,6 +1558,187 @@ paths:
|
|||||||
schema:
|
schema:
|
||||||
$ref: "#/components/schemas/BasicResult"
|
$ref: "#/components/schemas/BasicResult"
|
||||||
|
|
||||||
|
/share/search/query:
|
||||||
|
post:
|
||||||
|
operationId: "share-search-query"
|
||||||
|
tags: [Share]
|
||||||
|
summary: Performs a search in a share.
|
||||||
|
description: |
|
||||||
|
Allows to run a search query in the shared documents. The
|
||||||
|
input data structure is the same as with a standard query. The
|
||||||
|
`searchMode` parameter is ignored here.
|
||||||
|
security:
|
||||||
|
- shareTokenHeader: []
|
||||||
|
requestBody:
|
||||||
|
content:
|
||||||
|
application/json:
|
||||||
|
schema:
|
||||||
|
$ref: "#/components/schemas/ItemQuery"
|
||||||
|
responses:
|
||||||
|
200:
|
||||||
|
description: Ok
|
||||||
|
content:
|
||||||
|
application/json:
|
||||||
|
schema:
|
||||||
|
$ref: "#/components/schemas/ItemLightList"
|
||||||
|
/share/search/stats:
|
||||||
|
post:
|
||||||
|
operationId: "share-search-stats"
|
||||||
|
tags: [ Share ]
|
||||||
|
summary: Get basic statistics about search results.
|
||||||
|
description: |
|
||||||
|
Instead of returning the results of a query, uses it to return
|
||||||
|
a summary, constraint to the share.
|
||||||
|
security:
|
||||||
|
- shareTokenHeader: []
|
||||||
|
requestBody:
|
||||||
|
content:
|
||||||
|
application/json:
|
||||||
|
schema:
|
||||||
|
$ref: "#/components/schemas/ItemQuery"
|
||||||
|
responses:
|
||||||
|
200:
|
||||||
|
description: Ok
|
||||||
|
content:
|
||||||
|
application/json:
|
||||||
|
schema:
|
||||||
|
$ref: "#/components/schemas/SearchStats"
|
||||||
|
/share/item/{id}:
|
||||||
|
get:
|
||||||
|
operationId: "share-item-get"
|
||||||
|
tags: [ Share ]
|
||||||
|
summary: Get details about an item.
|
||||||
|
description: |
|
||||||
|
Get detailed information about an item.
|
||||||
|
security:
|
||||||
|
- shareTokenHeader: []
|
||||||
|
parameters:
|
||||||
|
- $ref: "#/components/parameters/id"
|
||||||
|
responses:
|
||||||
|
200:
|
||||||
|
description: Ok
|
||||||
|
content:
|
||||||
|
application/json:
|
||||||
|
schema:
|
||||||
|
$ref: "#/components/schemas/ItemDetail"
|
||||||
|
/share/attachment/{id}:
|
||||||
|
head:
|
||||||
|
operationId: "share-attach-head"
|
||||||
|
tags: [ Share ]
|
||||||
|
summary: Get headers to an attachment file.
|
||||||
|
description: |
|
||||||
|
Get information about the binary file belonging to the
|
||||||
|
attachment with the given id.
|
||||||
|
security:
|
||||||
|
- shareTokenHeader: []
|
||||||
|
parameters:
|
||||||
|
- $ref: "#/components/parameters/id"
|
||||||
|
responses:
|
||||||
|
200:
|
||||||
|
description: Ok
|
||||||
|
headers:
|
||||||
|
Content-Type:
|
||||||
|
schema:
|
||||||
|
type: string
|
||||||
|
Content-Length:
|
||||||
|
schema:
|
||||||
|
type: integer
|
||||||
|
format: int64
|
||||||
|
ETag:
|
||||||
|
schema:
|
||||||
|
type: string
|
||||||
|
Content-Disposition:
|
||||||
|
schema:
|
||||||
|
type: string
|
||||||
|
get:
|
||||||
|
operationId: "share-attach-get"
|
||||||
|
tags: [ Share ]
|
||||||
|
summary: Get an attachment file.
|
||||||
|
description: |
|
||||||
|
Get the binary file belonging to the attachment with the given
|
||||||
|
id. The binary is a pdf file. If conversion failed, then the
|
||||||
|
original file is returned.
|
||||||
|
security:
|
||||||
|
- shareTokenHeader: []
|
||||||
|
parameters:
|
||||||
|
- $ref: "#/components/parameters/id"
|
||||||
|
responses:
|
||||||
|
200:
|
||||||
|
description: Ok
|
||||||
|
content:
|
||||||
|
application/octet-stream:
|
||||||
|
schema:
|
||||||
|
type: string
|
||||||
|
format: binary
|
||||||
|
/share/attachment/{id}/view:
|
||||||
|
get:
|
||||||
|
operationId: "share-attach-show-viewerjs"
|
||||||
|
tags: [ Share ]
|
||||||
|
summary: A javascript rendered view of the pdf attachment
|
||||||
|
description: |
|
||||||
|
This provides a preview of the attachment rendered in a
|
||||||
|
browser.
|
||||||
|
|
||||||
|
It currently uses a third-party javascript library (viewerjs)
|
||||||
|
to display the preview. This works by redirecting to the
|
||||||
|
viewerjs url with the attachment url as parameter. Note that
|
||||||
|
the resulting url that is redirected to is not stable. It may
|
||||||
|
change from version to version. This route, however, is meant
|
||||||
|
to provide a stable url for the preview.
|
||||||
|
security:
|
||||||
|
- shareTokenHeader: []
|
||||||
|
parameters:
|
||||||
|
- $ref: "#/components/parameters/id"
|
||||||
|
responses:
|
||||||
|
303:
|
||||||
|
description: See Other
|
||||||
|
200:
|
||||||
|
description: Ok
|
||||||
|
/share/attachment/{id}/preview:
|
||||||
|
head:
|
||||||
|
operationId: "share-attach-check-preview"
|
||||||
|
tags: [ Attachment ]
|
||||||
|
summary: Get the headers to a preview image of an attachment file.
|
||||||
|
description: |
|
||||||
|
Checks if an image file showing a preview of the attachment is
|
||||||
|
available. If not available, a 404 is returned.
|
||||||
|
security:
|
||||||
|
- shareTokenHeader: []
|
||||||
|
parameters:
|
||||||
|
- $ref: "#/components/parameters/id"
|
||||||
|
responses:
|
||||||
|
200:
|
||||||
|
description: Ok
|
||||||
|
404:
|
||||||
|
description: NotFound
|
||||||
|
get:
|
||||||
|
operationId: "share-attach-get-preview"
|
||||||
|
tags: [ Attachment ]
|
||||||
|
summary: Get a preview image of an attachment file.
|
||||||
|
description: |
|
||||||
|
Gets a image file showing a preview of the attachment. Usually
|
||||||
|
it is a small image of the first page of the document.If not
|
||||||
|
available, a 404 is returned. However, if the query parameter
|
||||||
|
`withFallback` is `true`, a fallback preview image is
|
||||||
|
returned. You can also use the `HEAD` method to check for
|
||||||
|
existence.
|
||||||
|
|
||||||
|
The attachment must be in the search results of the current
|
||||||
|
share.
|
||||||
|
security:
|
||||||
|
- shareTokenHeader: []
|
||||||
|
parameters:
|
||||||
|
- $ref: "#/components/parameters/id"
|
||||||
|
- $ref: "#/components/parameters/withFallback"
|
||||||
|
responses:
|
||||||
|
200:
|
||||||
|
description: Ok
|
||||||
|
content:
|
||||||
|
application/octet-stream:
|
||||||
|
schema:
|
||||||
|
type: string
|
||||||
|
format: binary
|
||||||
|
|
||||||
/admin/user/resetPassword:
|
/admin/user/resetPassword:
|
||||||
post:
|
post:
|
||||||
operationId: "admin-user-reset-password"
|
operationId: "admin-user-reset-password"
|
||||||
@ -1711,6 +1923,125 @@ paths:
schema:
$ref: "#/components/schemas/BasicResult"

/sec/share:
get:
operationId: "sec-share-get-all"
tags: [ Share ]
summary: Get a list of shares
description: |
Return a list of all shares for this collective.
security:
- authTokenHeader: []
parameters:
- $ref: "#/components/parameters/q"
- $ref: "#/components/parameters/owningShare"
responses:
200:
description: Ok
content:
application/json:
schema:
$ref: "#/components/schemas/ShareList"
post:
operationId: "sec-share-new"
tags: [ Share ]
summary: Create a new share.
description: |
Create a new share.
security:
- authTokenHeader: []
requestBody:
content:
application/json:
schema:
$ref: "#/components/schemas/ShareData"
responses:
200:
description: Ok
content:
application/json:
schema:
$ref: "#/components/schemas/IdResult"
/sec/share/email/send/{name}:
post:
operationId: "sec-share-email-send"
tags: [ Share, E-Mail ]
summary: Send an email.
description: |
Sends an email as specified in the body of the request.

An existing shareId must be given with the request, no matter
the content of the mail.
security:
- authTokenHeader: []
parameters:
- $ref: "#/components/parameters/name"
requestBody:
content:
application/json:
schema:
$ref: "#/components/schemas/SimpleShareMail"
responses:
200:
description: Ok
content:
application/json:
schema:
$ref: "#/components/schemas/BasicResult"
/sec/share/{shareId}:
parameters:
- $ref: "#/components/parameters/shareId"
get:
operationId: "sec-share-get"
tags: [Share]
summary: Get details to a single share.
description: |
Given the id of a share, returns some details about it.
security:
- authTokenHeader: []
responses:
200:
description: Ok
content:
application/json:
schema:
$ref: "#/components/schemas/ShareDetail"
put:
operationId: "sec-share-update"
tags: [ Share ]
summary: Update an existing share.
description: |
Updates an existing share.
security:
- authTokenHeader: []
requestBody:
content:
application/json:
schema:
$ref: "#/components/schemas/ShareData"
responses:
200:
description: Ok
content:
application/json:
schema:
$ref: "#/components/schemas/BasicResult"
delete:
operationId: "sec-share-delete-by-id"
tags: [ Share ]
summary: Delete a share.
description: |
Deletes a share
security:
- authTokenHeader: []
responses:
200:
description: Ok
content:
application/json:
schema:
$ref: "#/components/schemas/BasicResult"

/sec/item/search:
get:
operationId: "sec-item-search-by-get"
@ -4096,6 +4427,126 @@ paths:
components:
schemas:

ShareSecret:
description: |
The secret (the share id + optional password) to access a
share.
required:
- shareId
properties:
shareId:
type: string
format: ident
password:
type: string
format: password

ShareVerifyResult:
description: |
The data returned when verifying a `ShareSecret`.
required:
- success
- token
- passwordRequired
- message
properties:
success:
type: boolean
token:
type: string
passwordRequired:
type: boolean
message:
type: string
name:
type: string
description: |
The name of the share if it exists. Only valid to use when
`success` is `true`.

ShareData:
description: |
Editable data for a share.
required:
- query
- enabled
- publishUntil
properties:
name:
type: string
query:
type: string
format: itemquery
enabled:
type: boolean
password:
type: string
format: password
publishUntil:
type: integer
format: date-time
removePassword:
type: boolean
description: |
For an update request, this can control whether to delete
the password. Otherwise if the password is not set, it
will not be changed. When adding a new share, this has no
effect.

ShareDetail:
description: |
Details for an existing share.
required:
- id
- query
- owner
- enabled
- publishAt
- publishUntil
- password
- views
- expired
properties:
id:
type: string
format: ident
query:
type: string
format: itemquery
owner:
$ref: "#/components/schemas/IdName"
name:
type: string
enabled:
type: boolean
publishAt:
type: integer
format: date-time
publishUntil:
type: integer
format: date-time
expired:
type: boolean
password:
type: boolean
views:
type: integer
format: int32
lastAccess:
type: integer
format: date-time

ShareList:
description: |
A list of shares.
required:
- items
properties:
items:
type: array
items:
$ref: "#/components/schemas/ShareDetail"

DeleteUserData:
description: |
An excerpt of data that would be deleted when deleting the
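To make the `ShareData` schema concrete, the following sketch builds a request body as it could be sent to `POST /sec/share` using circe. All concrete values (query text, name, password) are invented for illustration; the field names and types follow the schema above, and it is an assumption that the `date-time` integer is milliseconds since the epoch.

```scala
import io.circe.Json

object ShareDataExample {
  def main(args: Array[String]): Unit = {
    // Publish for two more weeks; assumed to be epoch milliseconds.
    val publishUntil = System.currentTimeMillis() + 14L * 24 * 60 * 60 * 1000

    val shareData: Json = Json.obj(
      "name"         -> Json.fromString("tax-2021"),    // optional
      "query"        -> Json.fromString("tag:invoice"), // hypothetical item query
      "enabled"      -> Json.fromBoolean(true),
      "password"     -> Json.fromString("s3cret"),      // optional
      "publishUntil" -> Json.fromLong(publishUntil)
    )

    println(shareData.spaces2)
  }
}
```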
@ -4103,6 +4554,7 @@ components:
|
|||||||
required:
|
required:
|
||||||
- folders
|
- folders
|
||||||
- sentMails
|
- sentMails
|
||||||
|
- shares
|
||||||
properties:
|
properties:
|
||||||
folders:
|
folders:
|
||||||
type: array
|
type: array
|
||||||
@ -4112,6 +4564,9 @@ components:
|
|||||||
sentMails:
|
sentMails:
|
||||||
type: integer
|
type: integer
|
||||||
format: int32
|
format: int32
|
||||||
|
shares:
|
||||||
|
type: integer
|
||||||
|
format: int32
|
||||||
|
|
||||||
SecondFactor:
|
SecondFactor:
|
||||||
description: |
|
description: |
|
||||||
@ -4864,6 +5319,36 @@ components:
|
|||||||
items:
|
items:
|
||||||
type: string
|
type: string
|
||||||
format: ident
|
format: ident
|
||||||
|
SimpleShareMail:
|
||||||
|
description: |
|
||||||
|
A simple e-mail related to a share.
|
||||||
|
required:
|
||||||
|
- shareId
|
||||||
|
- recipients
|
||||||
|
- cc
|
||||||
|
- bcc
|
||||||
|
- subject
|
||||||
|
- body
|
||||||
|
properties:
|
||||||
|
shareId:
|
||||||
|
type: string
|
||||||
|
format: ident
|
||||||
|
recipients:
|
||||||
|
type: array
|
||||||
|
items:
|
||||||
|
type: string
|
||||||
|
cc:
|
||||||
|
type: array
|
||||||
|
items:
|
||||||
|
type: string
|
||||||
|
bcc:
|
||||||
|
type: array
|
||||||
|
items:
|
||||||
|
type: string
|
||||||
|
subject:
|
||||||
|
type: string
|
||||||
|
body:
|
||||||
|
type: string
|
||||||
EmailSettingsList:
|
EmailSettingsList:
|
||||||
description: |
|
description: |
|
||||||
A list of user email settings.
|
A list of user email settings.
|
||||||
@ -4977,6 +5462,10 @@ components:
|
|||||||
- tagCategoryCloud
|
- tagCategoryCloud
|
||||||
- fieldStats
|
- fieldStats
|
||||||
- folderStats
|
- folderStats
|
||||||
|
- corrOrgStats
|
||||||
|
- corrPersStats
|
||||||
|
- concPersStats
|
||||||
|
- concEquipStats
|
||||||
properties:
|
properties:
|
||||||
count:
|
count:
|
||||||
type: integer
|
type: integer
|
||||||
@ -4993,6 +5482,23 @@ components:
|
|||||||
type: array
|
type: array
|
||||||
items:
|
items:
|
||||||
$ref: "#/components/schemas/FolderStats"
|
$ref: "#/components/schemas/FolderStats"
|
||||||
|
corrOrgStats:
|
||||||
|
type: array
|
||||||
|
items:
|
||||||
|
$ref: "#/components/schemas/IdRefStats"
|
||||||
|
corrPersStats:
|
||||||
|
type: array
|
||||||
|
items:
|
||||||
|
$ref: "#/components/schemas/IdRefStats"
|
||||||
|
concPersStats:
|
||||||
|
type: array
|
||||||
|
items:
|
||||||
|
$ref: "#/components/schemas/IdRefStats"
|
||||||
|
concEquipStats:
|
||||||
|
type: array
|
||||||
|
items:
|
||||||
|
$ref: "#/components/schemas/IdRefStats"
|
||||||
|
|
||||||
ItemInsights:
|
ItemInsights:
|
||||||
description: |
|
description: |
|
||||||
Information about the items in docspell.
|
Information about the items in docspell.
|
||||||
@ -5126,6 +5632,19 @@ components:
|
|||||||
type: integer
|
type: integer
|
||||||
format: int32
|
format: int32
|
||||||
|
|
||||||
|
IdRefStats:
|
||||||
|
description: |
|
||||||
|
Counting some objects that have an id and a name.
|
||||||
|
required:
|
||||||
|
- ref
|
||||||
|
- count
|
||||||
|
properties:
|
||||||
|
ref:
|
||||||
|
$ref: "#/components/schemas/IdName"
|
||||||
|
count:
|
||||||
|
type: integer
|
||||||
|
format: int32
|
||||||
|
|
||||||
AttachmentMeta:
|
AttachmentMeta:
|
||||||
description: |
|
description: |
|
||||||
Extracted meta data of an attachment.
|
Extracted meta data of an attachment.
|
||||||
@ -5635,6 +6154,7 @@ components:
|
|||||||
- integrationEnabled
|
- integrationEnabled
|
||||||
- classifier
|
- classifier
|
||||||
- emptyTrash
|
- emptyTrash
|
||||||
|
- passwords
|
||||||
properties:
|
properties:
|
||||||
language:
|
language:
|
||||||
type: string
|
type: string
|
||||||
@ -5648,6 +6168,11 @@ components:
|
|||||||
$ref: "#/components/schemas/ClassifierSetting"
|
$ref: "#/components/schemas/ClassifierSetting"
|
||||||
emptyTrash:
|
emptyTrash:
|
||||||
$ref: "#/components/schemas/EmptyTrashSetting"
|
$ref: "#/components/schemas/EmptyTrashSetting"
|
||||||
|
passwords:
|
||||||
|
type: array
|
||||||
|
items:
|
||||||
|
type: string
|
||||||
|
format: password
|
||||||
|
|
||||||
EmptyTrashSetting:
|
EmptyTrashSetting:
|
||||||
description: |
|
description: |
|
||||||
@ -6115,8 +6640,8 @@ components:
|
|||||||
type: string
|
type: string
|
||||||
IdResult:
|
IdResult:
|
||||||
description: |
|
description: |
|
||||||
Some basic result of an operation with an ID as payload. If
|
Some basic result of an operation with an ID as payload, if
|
||||||
success if `false` the id is not usable.
|
success is true. If success is `false` the id is not usable.
|
||||||
required:
|
required:
|
||||||
- success
|
- success
|
||||||
- message
|
- message
|
||||||
@ -6236,6 +6761,10 @@ components:
|
|||||||
type: apiKey
|
type: apiKey
|
||||||
in: header
|
in: header
|
||||||
name: Docspell-Admin-Secret
|
name: Docspell-Admin-Secret
|
||||||
|
shareTokenHeader:
|
||||||
|
type: apiKey
|
||||||
|
in: header
|
||||||
|
name: Docspell-Share-Auth
|
||||||
parameters:
|
parameters:
|
||||||
id:
|
id:
|
||||||
name: id
|
name: id
|
||||||
@ -6251,6 +6780,13 @@ components:
|
|||||||
required: true
|
required: true
|
||||||
schema:
|
schema:
|
||||||
type: string
|
type: string
|
||||||
|
shareId:
|
||||||
|
name: shareId
|
||||||
|
in: path
|
||||||
|
description: An identifier for a share
|
||||||
|
required: true
|
||||||
|
schema:
|
||||||
|
type: string
|
||||||
username:
|
username:
|
||||||
name: username
|
name: username
|
||||||
in: path
|
in: path
|
||||||
@ -6279,6 +6815,13 @@ components:
|
|||||||
required: false
|
required: false
|
||||||
schema:
|
schema:
|
||||||
type: boolean
|
type: boolean
|
||||||
|
owningShare:
|
||||||
|
name: owning
|
||||||
|
in: query
|
||||||
|
description: Return my own shares only
|
||||||
|
required: false
|
||||||
|
schema:
|
||||||
|
type: boolean
|
||||||
checksum:
|
checksum:
|
||||||
name: checksum
|
name: checksum
|
||||||
in: path
|
in: path
|
||||||
|
@ -0,0 +1,24 @@
|
/*
 * Copyright 2020 Eike K. & Contributors
 *
 * SPDX-License-Identifier: AGPL-3.0-or-later
 */

package docspell.restapi.codec

import docspell.query.{ItemQuery, ItemQueryParser}

import io.circe.{Decoder, Encoder}

trait ItemQueryJson {

  implicit val itemQueryDecoder: Decoder[ItemQuery] =
    Decoder.decodeString.emap(str => ItemQueryParser.parse(str).left.map(_.render))

  implicit val itemQueryEncoder: Encoder[ItemQuery] =
    Encoder.encodeString.contramap(q =>
      q.raw.getOrElse(ItemQueryParser.unsafeAsString(q.expr))
    )
}

object ItemQueryJson extends ItemQueryJson
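A small usage sketch for the codec above: with the implicit `Decoder`/`Encoder` in scope, an item query is read from and written back to a JSON string value. The query text `"tag:invoice"` is a hypothetical example, not taken from this change.

```scala
import docspell.query.ItemQuery
import docspell.restapi.codec.ItemQueryJson._

import io.circe.parser.decode
import io.circe.syntax._

object ItemQueryJsonExample {
  def main(args: Array[String]): Unit = {
    // A JSON string literal holding an item query; the query syntax is hypothetical.
    val json = "\"tag:invoice\""

    val parsed: Either[io.circe.Error, ItemQuery] = decode[ItemQuery](json)

    parsed match {
      case Right(q)  => println(s"round trip: ${q.asJson.noSpaces}")
      case Left(err) => println(s"invalid query: ${err.getMessage}")
    }
  }
}
```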
@ -47,8 +47,9 @@ docspell.server {
# The secret for this server that is used to sign the authenticator
# tokens. If multiple servers are running, all must share the same
# secret. You can use base64 or hex strings (prefix with b64: and
# hex:, respectively).
# hex:, respectively). If empty, a random secret is generated.
server-secret = "hex:caffee"
# Example: b64:YRx77QujCGkHSvll0TVEmtTaw3Z5eXr+nWMsEJowgKg=
server-secret = ""

# How long an authentication token is valid. The web application
# will get a new one periodically.
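Since the new default leaves `server-secret` empty and falls back to a generated value, operators who want a fixed secret have to produce one themselves. A minimal sketch for generating a value in the `b64:` form mentioned above; using 32 random bytes is an assumption, matching the buffer size in `generateSecretIfEmpty` further down in this change.

```scala
import java.security.SecureRandom
import java.util.Base64

object GenerateServerSecret {
  def main(args: Array[String]): Unit = {
    val bytes = new Array[Byte](32) // assumed size, same as the generated default
    new SecureRandom().nextBytes(bytes)
    val secret = Base64.getEncoder.encodeToString(bytes)
    // paste the output into the config as: server-secret = "b64:<value>"
    println(s"b64:$secret")
  }
}
```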
@ -279,6 +280,7 @@ docspell.server {
|
|||||||
|
|
||||||
# Configuration for the backend.
|
# Configuration for the backend.
|
||||||
backend {
|
backend {
|
||||||
|
|
||||||
# Enable or disable debugging for e-mail related functionality. This
|
# Enable or disable debugging for e-mail related functionality. This
|
||||||
# applies to both sending and receiving mails. For security reasons
|
# applies to both sending and receiving mails. For security reasons
|
||||||
# logging is not very extensive on authentication failures. Setting
|
# logging is not very extensive on authentication failures. Setting
|
||||||
@ -286,13 +288,17 @@ docspell.server {
mail-debug = false

# The database connection.
#
# By default a H2 file-based database is configured. You can
# provide a postgresql or mariadb connection here. When using H2
# use the PostgreSQL compatibility mode and AUTO_SERVER feature.
jdbc {
# The JDBC url to the database. By default a H2 file-based
# database is configured. You can provide a postgresql or
# mariadb connection here. When using H2 use the PostgreSQL
# compatibility mode and AUTO_SERVER feature.
url = "jdbc:h2://"${java.io.tmpdir}"/docspell-demo.db;MODE=PostgreSQL;DATABASE_TO_LOWER=TRUE;AUTO_SERVER=TRUE"

# The database user.
user = "sa"

# The database password.
password = ""
}
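When switching the `jdbc` block to PostgreSQL or MariaDB it can help to verify the url, user and password outside of docspell first. A rough sketch with plain JDBC; the connection values are placeholders and it is assumed that the matching JDBC driver is on the classpath.

```scala
import java.sql.DriverManager

object CheckJdbcConfig {
  def main(args: Array[String]): Unit = {
    // Placeholder values; use the same ones as in the jdbc { ... } block.
    val url      = "jdbc:postgresql://localhost:5432/docspell"
    val user     = "docspell"
    val password = "docspell"

    val conn = DriverManager.getConnection(url, user, password)
    try println(s"connected to: ${conn.getMetaData.getURL}")
    finally conn.close()
  }
}
```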
@ -6,23 +6,34 @@
|
|||||||
|
|
||||||
package docspell.restserver
|
package docspell.restserver
|
||||||
|
|
||||||
|
import java.security.SecureRandom
|
||||||
|
|
||||||
import cats.Semigroup
|
import cats.Semigroup
|
||||||
import cats.data.{Validated, ValidatedNec}
|
import cats.data.{Validated, ValidatedNec}
|
||||||
|
import cats.effect.Async
|
||||||
import cats.implicits._
|
import cats.implicits._
|
||||||
|
|
||||||
import docspell.backend.signup.{Config => SignupConfig}
|
import docspell.backend.signup.{Config => SignupConfig}
|
||||||
import docspell.common.config.Implicits._
|
import docspell.common.Logger
|
||||||
|
import docspell.config.ConfigFactory
|
||||||
|
import docspell.config.Implicits._
|
||||||
import docspell.oidc.{ProviderConfig, SignatureAlgo}
|
import docspell.oidc.{ProviderConfig, SignatureAlgo}
|
||||||
import docspell.restserver.auth.OpenId
|
import docspell.restserver.auth.OpenId
|
||||||
|
|
||||||
import pureconfig._
|
import pureconfig._
|
||||||
import pureconfig.generic.auto._
|
import pureconfig.generic.auto._
|
||||||
|
import scodec.bits.ByteVector
|
||||||
|
|
||||||
object ConfigFile {
|
object ConfigFile {
|
||||||
|
private[this] val unsafeLogger = org.log4s.getLogger
|
||||||
import Implicits._
|
import Implicits._
|
||||||
|
|
||||||
def loadConfig: Config =
|
def loadConfig[F[_]: Async](args: List[String]): F[Config] = {
|
||||||
Validate(ConfigSource.default.at("docspell.server").loadOrThrow[Config])
|
val logger = Logger.log4s(unsafeLogger)
|
||||||
|
ConfigFactory
|
||||||
|
.default[F, Config](logger, "docspell.server")(args)
|
||||||
|
.map(cfg => Validate(cfg))
|
||||||
|
}
|
||||||
|
|
||||||
object Implicits {
|
object Implicits {
|
||||||
implicit val signupModeReader: ConfigReader[SignupConfig.Mode] =
|
implicit val signupModeReader: ConfigReader[SignupConfig.Mode] =
|
||||||
@ -50,12 +61,25 @@ object ConfigFile {
|
|||||||
|
|
||||||
def all(cfg: Config) = List(
|
def all(cfg: Config) = List(
|
||||||
duplicateOpenIdProvider(cfg),
|
duplicateOpenIdProvider(cfg),
|
||||||
signKeyVsUserUrl(cfg)
|
signKeyVsUserUrl(cfg),
|
||||||
|
generateSecretIfEmpty(cfg)
|
||||||
)
|
)
|
||||||
|
|
||||||
private def valid(cfg: Config): ValidatedNec[String, Config] =
|
private def valid(cfg: Config): ValidatedNec[String, Config] =
|
||||||
Validated.validNec(cfg)
|
Validated.validNec(cfg)
|
||||||
|
|
||||||
|
def generateSecretIfEmpty(cfg: Config): ValidatedNec[String, Config] =
|
||||||
|
if (cfg.auth.serverSecret.isEmpty) {
|
||||||
|
unsafeLogger.warn(
|
||||||
|
"No serverSecret specified. Generating a random one. It is recommended to add a server-secret in the config file."
|
||||||
|
)
|
||||||
|
val random = new SecureRandom()
|
||||||
|
val buffer = new Array[Byte](32)
|
||||||
|
random.nextBytes(buffer)
|
||||||
|
val secret = ByteVector.view(buffer)
|
||||||
|
valid(cfg.copy(auth = cfg.auth.copy(serverSecret = secret)))
|
||||||
|
} else valid(cfg)
|
||||||
|
|
||||||
def duplicateOpenIdProvider(cfg: Config): ValidatedNec[String, Config] = {
|
def duplicateOpenIdProvider(cfg: Config): ValidatedNec[String, Config] = {
|
||||||
val dupes =
|
val dupes =
|
||||||
cfg.openid
|
cfg.openid
|
||||||
|
@ -6,46 +6,21 @@
|
|||||||
|
|
||||||
package docspell.restserver
|
package docspell.restserver
|
||||||
|
|
||||||
import java.nio.file.{Files, Paths}
|
|
||||||
|
|
||||||
import cats.effect._
|
import cats.effect._
|
||||||
import cats.implicits._
|
|
||||||
|
|
||||||
import docspell.common._
|
import docspell.common._
|
||||||
|
|
||||||
import org.log4s._
|
import org.log4s.getLogger
|
||||||
|
|
||||||
object Main extends IOApp {
|
object Main extends IOApp {
|
||||||
private[this] val logger = getLogger
|
private[this] val logger: Logger[IO] = Logger.log4s(getLogger)
|
||||||
|
|
||||||
val blockingEC =
|
private val connectEC =
|
||||||
ThreadFactories.cached[IO](ThreadFactories.ofName("docspell-restserver-blocking"))
|
|
||||||
val connectEC =
|
|
||||||
ThreadFactories.fixed[IO](5, ThreadFactories.ofName("docspell-dbconnect"))
|
ThreadFactories.fixed[IO](5, ThreadFactories.ofName("docspell-dbconnect"))
|
||||||
val restserverEC =
|
|
||||||
ThreadFactories.workSteal[IO](ThreadFactories.ofNameFJ("docspell-restserver"))
|
|
||||||
|
|
||||||
def run(args: List[String]) = {
|
def run(args: List[String]) = for {
|
||||||
args match {
|
cfg <- ConfigFile.loadConfig[IO](args)
|
||||||
case file :: Nil =>
|
banner = Banner(
|
||||||
val path = Paths.get(file).toAbsolutePath.normalize
|
|
||||||
logger.info(s"Using given config file: $path")
|
|
||||||
System.setProperty("config.file", file)
|
|
||||||
case _ =>
|
|
||||||
Option(System.getProperty("config.file")) match {
|
|
||||||
case Some(f) if f.nonEmpty =>
|
|
||||||
val path = Paths.get(f).toAbsolutePath.normalize
|
|
||||||
if (!Files.exists(path)) {
|
|
||||||
logger.info(s"Not using config file '$f' because it doesn't exist")
|
|
||||||
System.clearProperty("config.file")
|
|
||||||
} else
|
|
||||||
logger.info(s"Using config file from system properties: $f")
|
|
||||||
case _ =>
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
val cfg = ConfigFile.loadConfig
|
|
||||||
val banner = Banner(
|
|
||||||
"REST Server",
|
"REST Server",
|
||||||
BuildInfo.version,
|
BuildInfo.version,
|
||||||
BuildInfo.gitHeadCommit,
|
BuildInfo.gitHeadCommit,
|
||||||
@ -55,17 +30,14 @@ object Main extends IOApp {
|
|||||||
cfg.baseUrl,
|
cfg.baseUrl,
|
||||||
Some(cfg.fullTextSearch.solr.url).filter(_ => cfg.fullTextSearch.enabled)
|
Some(cfg.fullTextSearch.solr.url).filter(_ => cfg.fullTextSearch.enabled)
|
||||||
)
|
)
|
||||||
val pools = for {
|
_ <- logger.info(s"\n${banner.render("***>")}")
|
||||||
cec <- connectEC
|
_ <-
|
||||||
bec <- blockingEC
|
|
||||||
rec <- restserverEC
|
|
||||||
} yield Pools(cec, bec, rec)
|
|
||||||
|
|
||||||
logger.info(s"\n${banner.render("***>")}")
|
|
||||||
if (EnvMode.current.isDev) {
|
if (EnvMode.current.isDev) {
|
||||||
logger.warn(">>>>> Docspell is running in DEV mode! <<<<<")
|
logger.warn(">>>>> Docspell is running in DEV mode! <<<<<")
|
||||||
}
|
} else IO(())
|
||||||
|
|
||||||
|
pools = connectEC.map(Pools.apply)
|
||||||
|
rc <-
|
||||||
pools.use(p =>
|
pools.use(p =>
|
||||||
RestServer
|
RestServer
|
||||||
.stream[IO](cfg, p)
|
.stream[IO](cfg, p)
|
||||||
@ -73,5 +45,5 @@ object Main extends IOApp {
|
|||||||
.drain
|
.drain
|
||||||
.as(ExitCode.Success)
|
.as(ExitCode.Success)
|
||||||
)
|
)
|
||||||
}
|
} yield rc
|
||||||
}
|
}
|
||||||
|
@ -32,11 +32,10 @@ object RestAppImpl {
|
|||||||
|
|
||||||
def create[F[_]: Async](
|
def create[F[_]: Async](
|
||||||
cfg: Config,
|
cfg: Config,
|
||||||
connectEC: ExecutionContext,
|
connectEC: ExecutionContext
|
||||||
httpClientEc: ExecutionContext
|
|
||||||
): Resource[F, RestApp[F]] =
|
): Resource[F, RestApp[F]] =
|
||||||
for {
|
for {
|
||||||
backend <- BackendApp(cfg.backend, connectEC, httpClientEc)(
|
backend <- BackendApp(cfg.backend, connectEC)(
|
||||||
createFtsClient[F](cfg)
|
createFtsClient[F](cfg)
|
||||||
)
|
)
|
||||||
app = new RestAppImpl[F](cfg, backend)
|
app = new RestAppImpl[F](cfg, backend)
|
||||||
|
@ -10,7 +10,7 @@ import cats.effect._
|
|||||||
import cats.implicits._
|
import cats.implicits._
|
||||||
import fs2.Stream
|
import fs2.Stream
|
||||||
|
|
||||||
import docspell.backend.auth.AuthToken
|
import docspell.backend.auth.{AuthToken, ShareToken}
|
||||||
import docspell.common._
|
import docspell.common._
|
||||||
import docspell.oidc.CodeFlowRoutes
|
import docspell.oidc.CodeFlowRoutes
|
||||||
import docspell.restserver.auth.OpenId
|
import docspell.restserver.auth.OpenId
|
||||||
@ -34,19 +34,20 @@ object RestServer {
|
|||||||
|
|
||||||
val templates = TemplateRoutes[F](cfg)
|
val templates = TemplateRoutes[F](cfg)
|
||||||
val app = for {
|
val app = for {
|
||||||
restApp <-
|
restApp <- RestAppImpl.create[F](cfg, pools.connectEC)
|
||||||
RestAppImpl
|
httpClient <- BlazeClientBuilder[F].resource
|
||||||
.create[F](cfg, pools.connectEC, pools.httpClientEC)
|
|
||||||
httpClient <- BlazeClientBuilder[F](pools.httpClientEC).resource
|
|
||||||
httpApp = Router(
|
httpApp = Router(
|
||||||
"/api/info" -> routes.InfoRoutes(),
|
"/api/info" -> routes.InfoRoutes(),
|
||||||
"/api/v1/open/" -> openRoutes(cfg, httpClient, restApp),
|
"/api/v1/open/" -> openRoutes(cfg, httpClient, restApp),
|
||||||
"/api/v1/sec/" -> Authenticate(restApp.backend.login, cfg.auth) { token =>
|
"/api/v1/sec/" -> Authenticate(restApp.backend.login, cfg.auth) { token =>
|
||||||
securedRoutes(cfg, restApp, token)
|
securedRoutes(cfg, restApp, token)
|
||||||
},
|
},
|
||||||
"/api/v1/admin" -> AdminRoutes(cfg.adminEndpoint) {
|
"/api/v1/admin" -> AdminAuth(cfg.adminEndpoint) {
|
||||||
adminRoutes(cfg, restApp)
|
adminRoutes(cfg, restApp)
|
||||||
},
|
},
|
||||||
|
"/api/v1/share" -> ShareAuth(restApp.backend.share, cfg.auth) { token =>
|
||||||
|
shareRoutes(cfg, restApp, token)
|
||||||
|
},
|
||||||
"/api/doc" -> templates.doc,
|
"/api/doc" -> templates.doc,
|
||||||
"/app/assets" -> EnvMiddleware(WebjarRoutes.appRoutes[F]),
|
"/app/assets" -> EnvMiddleware(WebjarRoutes.appRoutes[F]),
|
||||||
"/app" -> EnvMiddleware(templates.app),
|
"/app" -> EnvMiddleware(templates.app),
|
||||||
@ -61,7 +62,7 @@ object RestServer {
|
|||||||
Stream
|
Stream
|
||||||
.resource(app)
|
.resource(app)
|
||||||
.flatMap(httpApp =>
|
.flatMap(httpApp =>
|
||||||
BlazeServerBuilder[F](pools.restEC)
|
BlazeServerBuilder[F]
|
||||||
.bindHttp(cfg.bind.port, cfg.bind.address)
|
.bindHttp(cfg.bind.port, cfg.bind.address)
|
||||||
.withHttpApp(httpApp)
|
.withHttpApp(httpApp)
|
||||||
.withoutBanner
|
.withoutBanner
|
||||||
@ -94,6 +95,7 @@ object RestServer {
|
|||||||
"email/send" -> MailSendRoutes(restApp.backend, token),
|
"email/send" -> MailSendRoutes(restApp.backend, token),
|
||||||
"email/settings" -> MailSettingsRoutes(restApp.backend, token),
|
"email/settings" -> MailSettingsRoutes(restApp.backend, token),
|
||||||
"email/sent" -> SentMailRoutes(restApp.backend, token),
|
"email/sent" -> SentMailRoutes(restApp.backend, token),
|
||||||
|
"share" -> ShareRoutes.manage(restApp.backend, token),
|
||||||
"usertask/notifydueitems" -> NotifyDueItemsRoutes(cfg, restApp.backend, token),
|
"usertask/notifydueitems" -> NotifyDueItemsRoutes(cfg, restApp.backend, token),
|
||||||
"usertask/scanmailbox" -> ScanMailboxRoutes(restApp.backend, token),
|
"usertask/scanmailbox" -> ScanMailboxRoutes(restApp.backend, token),
|
||||||
"calevent/check" -> CalEventCheckRoutes(),
|
"calevent/check" -> CalEventCheckRoutes(),
|
||||||
@ -119,7 +121,8 @@ object RestServer {
|
|||||||
"signup" -> RegisterRoutes(restApp.backend, cfg),
|
"signup" -> RegisterRoutes(restApp.backend, cfg),
|
||||||
"upload" -> UploadRoutes.open(restApp.backend, cfg),
|
"upload" -> UploadRoutes.open(restApp.backend, cfg),
|
||||||
"checkfile" -> CheckFileRoutes.open(restApp.backend),
|
"checkfile" -> CheckFileRoutes.open(restApp.backend),
|
||||||
"integration" -> IntegrationEndpointRoutes.open(restApp.backend, cfg)
|
"integration" -> IntegrationEndpointRoutes.open(restApp.backend, cfg),
|
||||||
|
"share" -> ShareRoutes.verify(restApp.backend, cfg)
|
||||||
)
|
)
|
||||||
|
|
||||||
def adminRoutes[F[_]: Async](cfg: Config, restApp: RestApp[F]): HttpRoutes[F] =
|
def adminRoutes[F[_]: Async](cfg: Config, restApp: RestApp[F]): HttpRoutes[F] =
|
||||||
@ -131,6 +134,17 @@ object RestServer {
|
|||||||
"attachments" -> AttachmentRoutes.admin(restApp.backend)
|
"attachments" -> AttachmentRoutes.admin(restApp.backend)
|
||||||
)
|
)
|
||||||
|
|
||||||
|
def shareRoutes[F[_]: Async](
|
||||||
|
cfg: Config,
|
||||||
|
restApp: RestApp[F],
|
||||||
|
token: ShareToken
|
||||||
|
): HttpRoutes[F] =
|
||||||
|
Router(
|
||||||
|
"search" -> ShareSearchRoutes(restApp.backend, cfg, token),
|
||||||
|
"attachment" -> ShareAttachmentRoutes(restApp.backend, token),
|
||||||
|
"item" -> ShareItemRoutes(restApp.backend, token)
|
||||||
|
)
|
||||||
|
|
||||||
def redirectTo[F[_]: Async](path: String): HttpRoutes[F] = {
|
def redirectTo[F[_]: Async](path: String): HttpRoutes[F] = {
|
||||||
val dsl = new Http4sDsl[F] {}
|
val dsl = new Http4sDsl[F] {}
|
||||||
import dsl._
|
import dsl._
|
||||||
|
@ -0,0 +1,69 @@
|
|||||||
|
/*
|
||||||
|
* Copyright 2020 Eike K. & Contributors
|
||||||
|
*
|
||||||
|
* SPDX-License-Identifier: AGPL-3.0-or-later
|
||||||
|
*/
|
||||||
|
|
||||||
|
package docspell.restserver.auth
|
||||||
|
|
||||||
|
import docspell.backend.auth.ShareToken
|
||||||
|
import docspell.common._
|
||||||
|
|
||||||
|
import org.http4s._
|
||||||
|
import org.typelevel.ci.CIString
|
||||||
|
|
||||||
|
final case class ShareCookieData(token: ShareToken) {
|
||||||
|
def asString: String = token.asString
|
||||||
|
|
||||||
|
def asCookie(baseUrl: LenientUri): ResponseCookie = {
|
||||||
|
val sec = baseUrl.scheme.exists(_.endsWith("s"))
|
||||||
|
val path = baseUrl.path / "api" / "v1"
|
||||||
|
ResponseCookie(
|
||||||
|
name = ShareCookieData.cookieName,
|
||||||
|
content = asString,
|
||||||
|
domain = None,
|
||||||
|
path = Some(path.asString),
|
||||||
|
httpOnly = true,
|
||||||
|
secure = sec,
|
||||||
|
maxAge = None,
|
||||||
|
expires = None
|
||||||
|
)
|
||||||
|
}
|
||||||
|
|
||||||
|
def addCookie[F[_]](baseUrl: LenientUri)(
|
||||||
|
resp: Response[F]
|
||||||
|
): Response[F] =
|
||||||
|
resp.addCookie(asCookie(baseUrl))
|
||||||
|
}
|
||||||
|
|
||||||
|
object ShareCookieData {
|
||||||
|
val cookieName = "docspell_share"
|
||||||
|
val headerName = "Docspell-Share-Auth"
|
||||||
|
|
||||||
|
def fromCookie[F[_]](req: Request[F]): Option[String] =
|
||||||
|
for {
|
||||||
|
header <- req.headers.get[headers.Cookie]
|
||||||
|
cookie <- header.values.toList.find(_.name == cookieName)
|
||||||
|
} yield cookie.content
|
||||||
|
|
||||||
|
def fromHeader[F[_]](req: Request[F]): Option[String] =
|
||||||
|
req.headers
|
||||||
|
.get(CIString(headerName))
|
||||||
|
.map(_.head.value)
|
||||||
|
|
||||||
|
def fromRequest[F[_]](req: Request[F]): Option[String] =
|
||||||
|
fromCookie(req).orElse(fromHeader(req))
|
||||||
|
|
||||||
|
def delete(baseUrl: LenientUri): ResponseCookie =
|
||||||
|
ResponseCookie(
|
||||||
|
name = cookieName,
|
||||||
|
content = "",
|
||||||
|
domain = None,
|
||||||
|
path = Some(baseUrl.path / "api" / "v1").map(_.asString),
|
||||||
|
httpOnly = true,
|
||||||
|
secure = baseUrl.scheme.exists(_.endsWith("s")),
|
||||||
|
maxAge = None,
|
||||||
|
expires = None
|
||||||
|
)
|
||||||
|
|
||||||
|
}
|
@ -22,7 +22,7 @@ import docspell.common.syntax.all._
|
|||||||
import docspell.ftsclient.FtsResult
|
import docspell.ftsclient.FtsResult
|
||||||
import docspell.restapi.model._
|
import docspell.restapi.model._
|
||||||
import docspell.restserver.conv.Conversions._
|
import docspell.restserver.conv.Conversions._
|
||||||
import docspell.store.queries.{AttachmentLight => QAttachmentLight}
|
import docspell.store.queries.{AttachmentLight => QAttachmentLight, IdRefCount}
|
||||||
import docspell.store.records._
|
import docspell.store.records._
|
||||||
import docspell.store.{AddResult, UpdateResult}
|
import docspell.store.{AddResult, UpdateResult}
|
||||||
|
|
||||||
@ -38,9 +38,16 @@ trait Conversions {
|
|||||||
mkTagCloud(sum.tags),
|
mkTagCloud(sum.tags),
|
||||||
mkTagCategoryCloud(sum.cats),
|
mkTagCategoryCloud(sum.cats),
|
||||||
sum.fields.map(mkFieldStats),
|
sum.fields.map(mkFieldStats),
|
||||||
sum.folders.map(mkFolderStats)
|
sum.folders.map(mkFolderStats),
|
||||||
|
sum.corrOrgs.map(mkIdRefStats),
|
||||||
|
sum.corrPers.map(mkIdRefStats),
|
||||||
|
sum.concPers.map(mkIdRefStats),
|
||||||
|
sum.concEquip.map(mkIdRefStats)
|
||||||
)
|
)
|
||||||
|
|
||||||
|
def mkIdRefStats(s: IdRefCount): IdRefStats =
|
||||||
|
IdRefStats(mkIdName(s.ref), s.count)
|
||||||
|
|
||||||
def mkFolderStats(fs: docspell.store.queries.FolderCount): FolderStats =
|
def mkFolderStats(fs: docspell.store.queries.FolderCount): FolderStats =
|
||||||
FolderStats(fs.id, fs.name, mkIdName(fs.owner), fs.count)
|
FolderStats(fs.id, fs.name, mkIdName(fs.owner), fs.count)
|
||||||
|
|
||||||
|
@ -11,7 +11,10 @@ import cats.data.OptionT
|
|||||||
import cats.effect._
|
import cats.effect._
|
||||||
import cats.implicits._
|
import cats.implicits._
|
||||||
|
|
||||||
|
import docspell.backend.ops.OItemSearch.{AttachmentData, AttachmentPreviewData}
|
||||||
import docspell.backend.ops._
|
import docspell.backend.ops._
|
||||||
|
import docspell.restapi.model.BasicResult
|
||||||
|
import docspell.restserver.http4s.{QueryParam => QP}
|
||||||
import docspell.store.records.RFileMeta
|
import docspell.store.records.RFileMeta
|
||||||
|
|
||||||
import org.http4s._
|
import org.http4s._
|
||||||
@ -23,6 +26,68 @@ import org.typelevel.ci.CIString
|
|||||||
|
|
||||||
object BinaryUtil {
|
object BinaryUtil {
|
||||||
|
|
||||||
|
def respond[F[_]: Async](dsl: Http4sDsl[F], req: Request[F])(
|
||||||
|
fileData: Option[AttachmentData[F]]
|
||||||
|
): F[Response[F]] = {
|
||||||
|
import dsl._
|
||||||
|
|
||||||
|
val inm = req.headers.get[`If-None-Match`].flatMap(_.tags)
|
||||||
|
val matches = BinaryUtil.matchETag(fileData.map(_.meta), inm)
|
||||||
|
fileData
|
||||||
|
.map { data =>
|
||||||
|
if (matches) withResponseHeaders(dsl, NotModified())(data)
|
||||||
|
else makeByteResp(dsl)(data)
|
||||||
|
}
|
||||||
|
.getOrElse(NotFound(BasicResult(false, "Not found")))
|
||||||
|
}
|
||||||
|
|
||||||
|
def respondHead[F[_]: Async](dsl: Http4sDsl[F])(
|
||||||
|
fileData: Option[AttachmentData[F]]
|
||||||
|
): F[Response[F]] = {
|
||||||
|
import dsl._
|
||||||
|
|
||||||
|
fileData
|
||||||
|
.map(data => withResponseHeaders(dsl, Ok())(data))
|
||||||
|
.getOrElse(NotFound(BasicResult(false, "Not found")))
|
||||||
|
}
|
||||||
|
|
||||||
|
def respondPreview[F[_]: Async](dsl: Http4sDsl[F], req: Request[F])(
|
||||||
|
fileData: Option[AttachmentPreviewData[F]]
|
||||||
|
): F[Response[F]] = {
|
||||||
|
import dsl._
|
||||||
|
def notFound =
|
||||||
|
NotFound(BasicResult(false, "Not found"))
|
||||||
|
|
||||||
|
QP.WithFallback.unapply(req.multiParams) match {
|
||||||
|
case Some(bool) =>
|
||||||
|
val fallback = bool.getOrElse(false)
|
||||||
|
val inm = req.headers.get[`If-None-Match`].flatMap(_.tags)
|
||||||
|
val matches = matchETag(fileData.map(_.meta), inm)
|
||||||
|
|
||||||
|
fileData
|
||||||
|
.map { data =>
|
||||||
|
if (matches) withResponseHeaders(dsl, NotModified())(data)
|
||||||
|
else makeByteResp(dsl)(data)
|
||||||
|
}
|
||||||
|
.getOrElse(
|
||||||
|
if (fallback) BinaryUtil.noPreview(req.some).getOrElseF(notFound)
|
||||||
|
else notFound
|
||||||
|
)
|
||||||
|
|
||||||
|
case None =>
|
||||||
|
BadRequest(BasicResult(false, "Invalid query parameter 'withFallback'"))
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
def respondPreviewHead[F[_]: Async](
|
||||||
|
dsl: Http4sDsl[F]
|
||||||
|
)(fileData: Option[AttachmentPreviewData[F]]): F[Response[F]] = {
|
||||||
|
import dsl._
|
||||||
|
fileData
|
||||||
|
.map(data => withResponseHeaders(dsl, Ok())(data))
|
||||||
|
.getOrElse(NotFound(BasicResult(false, "Not found")))
|
||||||
|
}
|
||||||
|
|
||||||
def withResponseHeaders[F[_]: Sync](dsl: Http4sDsl[F], resp: F[Response[F]])(
|
def withResponseHeaders[F[_]: Sync](dsl: Http4sDsl[F], resp: F[Response[F]])(
|
||||||
data: OItemSearch.BinaryData[F]
|
data: OItemSearch.BinaryData[F]
|
||||||
): F[Response[F]] = {
|
): F[Response[F]] = {
|
||||||
|
@ -16,7 +16,7 @@ import docspell.common.SearchMode
|
|||||||
|
|
||||||
import org.http4s.ParseFailure
|
import org.http4s.ParseFailure
|
||||||
import org.http4s.QueryParamDecoder
|
import org.http4s.QueryParamDecoder
|
||||||
import org.http4s.dsl.impl.OptionalQueryParamDecoderMatcher
|
import org.http4s.dsl.impl.{FlagQueryParamMatcher, OptionalQueryParamDecoderMatcher}
|
||||||
|
|
||||||
object QueryParam {
|
object QueryParam {
|
||||||
case class QueryString(q: String)
|
case class QueryString(q: String)
|
||||||
@ -67,6 +67,7 @@ object QueryParam {
|
|||||||
object FullOpt extends OptionalQueryParamDecoderMatcher[Boolean]("full")
|
object FullOpt extends OptionalQueryParamDecoderMatcher[Boolean]("full")
|
||||||
|
|
||||||
object OwningOpt extends OptionalQueryParamDecoderMatcher[Boolean]("owning")
|
object OwningOpt extends OptionalQueryParamDecoderMatcher[Boolean]("owning")
|
||||||
|
object OwningFlag extends FlagQueryParamMatcher("owning")
|
||||||
|
|
||||||
object ContactKindOpt extends OptionalQueryParamDecoderMatcher[ContactKind]("kind")
|
object ContactKindOpt extends OptionalQueryParamDecoderMatcher[ContactKind]("kind")
|
||||||
|
|
||||||
|
@ -10,6 +10,7 @@ import cats.data.{Kleisli, OptionT}
|
|||||||
import cats.effect._
|
import cats.effect._
|
||||||
import cats.implicits._
|
import cats.implicits._
|
||||||
|
|
||||||
|
import docspell.common.Password
|
||||||
import docspell.restserver.Config
|
import docspell.restserver.Config
|
||||||
import docspell.restserver.http4s.Responses
|
import docspell.restserver.http4s.Responses
|
||||||
|
|
||||||
@ -19,7 +20,7 @@ import org.http4s.dsl.Http4sDsl
|
|||||||
import org.http4s.server._
|
import org.http4s.server._
|
||||||
import org.typelevel.ci.CIString
|
import org.typelevel.ci.CIString
|
||||||
|
|
||||||
object AdminRoutes {
|
object AdminAuth {
|
||||||
private val adminHeader = CIString("Docspell-Admin-Secret")
|
private val adminHeader = CIString("Docspell-Admin-Secret")
|
||||||
|
|
||||||
def apply[F[_]: Async](cfg: Config.AdminEndpoint)(
|
def apply[F[_]: Async](cfg: Config.AdminEndpoint)(
|
||||||
@ -55,6 +56,5 @@ object AdminRoutes {
|
    req.headers.get(adminHeader).map(_.head.value)

  private def compareSecret(s1: String)(s2: String): Boolean =
    s1.length > 0 && s1.length == s2.length &&
      s1.zip(s2).forall { case (a, b) => a == b }
    Password(s1).compare(Password(s2))
}
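The change above replaces a hand-written character-by-character comparison of the admin secret with `Password(s1).compare(Password(s2))`. The point of such a helper is a constant-time comparison that does not leak where the secrets first differ. The sketch below shows that general technique using the JDK's `MessageDigest.isEqual`; it is an assumption, not taken from this change, that `Password#compare` is implemented along these lines.

```scala
import java.nio.charset.StandardCharsets
import java.security.MessageDigest

object ConstantTimeCompare {
  // Compare two secrets without a timing side channel on their contents.
  // MessageDigest.isEqual is documented to be time-constant since Java 6u17.
  def equalSecrets(a: String, b: String): Boolean =
    MessageDigest.isEqual(
      a.getBytes(StandardCharsets.UTF_8),
      b.getBytes(StandardCharsets.UTF_8)
    )

  def main(args: Array[String]): Unit = {
    println(equalSecrets("admin-secret", "admin-secret")) // true
    println(equalSecrets("admin-secret", "admin-secrex")) // false
  }
}
```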
@ -17,7 +17,6 @@ import docspell.common.MakePreviewArgs
|
|||||||
import docspell.restapi.model._
|
import docspell.restapi.model._
|
||||||
import docspell.restserver.conv.Conversions
|
import docspell.restserver.conv.Conversions
|
||||||
import docspell.restserver.http4s.BinaryUtil
|
import docspell.restserver.http4s.BinaryUtil
|
||||||
import docspell.restserver.http4s.{QueryParam => QP}
|
|
||||||
import docspell.restserver.webapp.Webjars
|
import docspell.restserver.webapp.Webjars
|
||||||
|
|
||||||
import org.http4s._
|
import org.http4s._
|
||||||
@ -47,24 +46,13 @@ object AttachmentRoutes {
|
|||||||
case HEAD -> Root / Ident(id) =>
|
case HEAD -> Root / Ident(id) =>
|
||||||
for {
|
for {
|
||||||
fileData <- backend.itemSearch.findAttachment(id, user.account.collective)
|
fileData <- backend.itemSearch.findAttachment(id, user.account.collective)
|
||||||
resp <-
|
resp <- BinaryUtil.respondHead(dsl)(fileData)
|
||||||
fileData
|
|
||||||
.map(data => withResponseHeaders(Ok())(data))
|
|
||||||
.getOrElse(NotFound(BasicResult(false, "Not found")))
|
|
||||||
} yield resp
|
} yield resp
|
||||||
|
|
||||||
case req @ GET -> Root / Ident(id) =>
|
case req @ GET -> Root / Ident(id) =>
|
||||||
for {
|
for {
|
||||||
fileData <- backend.itemSearch.findAttachment(id, user.account.collective)
|
fileData <- backend.itemSearch.findAttachment(id, user.account.collective)
|
||||||
inm = req.headers.get[`If-None-Match`].flatMap(_.tags)
|
resp <- BinaryUtil.respond[F](dsl, req)(fileData)
|
||||||
matches = BinaryUtil.matchETag(fileData.map(_.meta), inm)
|
|
||||||
resp <-
|
|
||||||
fileData
|
|
||||||
.map { data =>
|
|
||||||
if (matches) withResponseHeaders(NotModified())(data)
|
|
||||||
else makeByteResp(data)
|
|
||||||
}
|
|
||||||
.getOrElse(NotFound(BasicResult(false, "Not found")))
|
|
||||||
} yield resp
|
} yield resp
|
||||||
|
|
||||||
case HEAD -> Root / Ident(id) / "original" =>
|
case HEAD -> Root / Ident(id) / "original" =>
|
||||||
@ -115,35 +103,18 @@ object AttachmentRoutes {
|
|||||||
.getOrElse(NotFound(BasicResult(false, "Not found")))
|
.getOrElse(NotFound(BasicResult(false, "Not found")))
|
||||||
} yield resp
|
} yield resp
|
||||||
|
|
||||||
case req @ GET -> Root / Ident(id) / "preview" :? QP.WithFallback(flag) =>
|
case req @ GET -> Root / Ident(id) / "preview" =>
|
||||||
def notFound =
|
|
||||||
NotFound(BasicResult(false, "Not found"))
|
|
||||||
for {
|
for {
|
||||||
fileData <-
|
fileData <-
|
||||||
backend.itemSearch.findAttachmentPreview(id, user.account.collective)
|
backend.itemSearch.findAttachmentPreview(id, user.account.collective)
|
||||||
inm = req.headers.get[`If-None-Match`].flatMap(_.tags)
|
resp <- BinaryUtil.respondPreview(dsl, req)(fileData)
|
||||||
matches = BinaryUtil.matchETag(fileData.map(_.meta), inm)
|
|
||||||
fallback = flag.getOrElse(false)
|
|
||||||
resp <-
|
|
||||||
fileData
|
|
||||||
.map { data =>
|
|
||||||
if (matches) withResponseHeaders(NotModified())(data)
|
|
||||||
else makeByteResp(data)
|
|
||||||
}
|
|
||||||
.getOrElse(
|
|
||||||
if (fallback) BinaryUtil.noPreview(req.some).getOrElseF(notFound)
|
|
||||||
else notFound
|
|
||||||
)
|
|
||||||
} yield resp
|
} yield resp
|
||||||
|
|
||||||
case HEAD -> Root / Ident(id) / "preview" =>
|
case HEAD -> Root / Ident(id) / "preview" =>
|
||||||
for {
|
for {
|
||||||
fileData <-
|
fileData <-
|
||||||
backend.itemSearch.findAttachmentPreview(id, user.account.collective)
|
backend.itemSearch.findAttachmentPreview(id, user.account.collective)
|
||||||
resp <-
|
resp <- BinaryUtil.respondPreviewHead(dsl)(fileData)
|
||||||
fileData
|
|
||||||
.map(data => withResponseHeaders(Ok())(data))
|
|
||||||
.getOrElse(NotFound(BasicResult(false, "Not found")))
|
|
||||||
} yield resp
|
} yield resp
|
||||||
|
|
||||||
case POST -> Root / Ident(id) / "preview" =>
|
case POST -> Root / Ident(id) / "preview" =>
|
||||||
|
@ -12,8 +12,7 @@ import cats.implicits._
|
|||||||
import docspell.backend.BackendApp
|
import docspell.backend.BackendApp
|
||||||
import docspell.backend.auth.AuthToken
|
import docspell.backend.auth.AuthToken
|
||||||
import docspell.backend.ops.OCollective
|
import docspell.backend.ops.OCollective
|
||||||
import docspell.common.EmptyTrashArgs
|
import docspell.common._
|
||||||
import docspell.common.ListType
|
|
||||||
import docspell.restapi.model._
|
import docspell.restapi.model._
|
||||||
import docspell.restserver.conv.Conversions
|
import docspell.restserver.conv.Conversions
|
||||||
import docspell.restserver.http4s._
|
import docspell.restserver.http4s._
|
||||||
@ -62,7 +61,8 @@ object CollectiveRoutes {
|
|||||||
settings.emptyTrash.schedule,
|
settings.emptyTrash.schedule,
|
||||||
settings.emptyTrash.minAge
|
settings.emptyTrash.minAge
|
||||||
)
|
)
|
||||||
)
|
),
|
||||||
|
settings.passwords.map(Password.apply)
|
||||||
)
|
)
|
||||||
res <-
|
res <-
|
||||||
backend.collective
|
backend.collective
|
||||||
@ -89,7 +89,8 @@ object CollectiveRoutes {
|
|||||||
EmptyTrashSetting(
|
EmptyTrashSetting(
|
||||||
trash.schedule,
|
trash.schedule,
|
||||||
trash.minAge
|
trash.minAge
|
||||||
)
|
),
|
||||||
|
settDb.map(_.passwords).getOrElse(Nil).map(_.pass)
|
||||||
)
|
)
|
||||||
)
|
)
|
||||||
resp <- sett.toResponse()
|
resp <- sett.toResponse()
|
||||||
|
@ -28,11 +28,11 @@ import docspell.restserver.http4s.BinaryUtil
|
|||||||
import docspell.restserver.http4s.Responses
|
import docspell.restserver.http4s.Responses
|
||||||
import docspell.restserver.http4s.{QueryParam => QP}
|
import docspell.restserver.http4s.{QueryParam => QP}
|
||||||
|
|
||||||
import org.http4s.HttpRoutes
|
|
||||||
import org.http4s.circe.CirceEntityDecoder._
|
import org.http4s.circe.CirceEntityDecoder._
|
||||||
import org.http4s.circe.CirceEntityEncoder._
|
import org.http4s.circe.CirceEntityEncoder._
|
||||||
import org.http4s.dsl.Http4sDsl
|
import org.http4s.dsl.Http4sDsl
|
||||||
import org.http4s.headers._
|
import org.http4s.headers._
|
||||||
|
import org.http4s.{HttpRoutes, Response}
|
||||||
import org.log4s._
|
import org.log4s._
|
||||||
|
|
||||||
object ItemRoutes {
|
object ItemRoutes {
|
||||||
@ -415,7 +415,11 @@ object ItemRoutes {
|
|||||||
def searchItems[F[_]: Sync](
|
def searchItems[F[_]: Sync](
|
||||||
backend: BackendApp[F],
|
backend: BackendApp[F],
|
||||||
dsl: Http4sDsl[F]
|
dsl: Http4sDsl[F]
|
||||||
)(settings: OSimpleSearch.Settings, fixQuery: Query.Fix, itemQuery: ItemQueryString) = {
|
)(
|
||||||
|
settings: OSimpleSearch.Settings,
|
||||||
|
fixQuery: Query.Fix,
|
||||||
|
itemQuery: ItemQueryString
|
||||||
|
): F[Response[F]] = {
|
||||||
import dsl._
|
import dsl._
|
||||||
|
|
||||||
def convertFts(res: OSimpleSearch.Items.FtsItems): ItemLightList =
|
def convertFts(res: OSimpleSearch.Items.FtsItems): ItemLightList =
|
||||||
@ -452,14 +456,14 @@ object ItemRoutes {
|
|||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
private def searchItemStats[F[_]: Sync](
|
def searchItemStats[F[_]: Sync](
|
||||||
backend: BackendApp[F],
|
backend: BackendApp[F],
|
||||||
dsl: Http4sDsl[F]
|
dsl: Http4sDsl[F]
|
||||||
)(
|
)(
|
||||||
settings: OSimpleSearch.StatsSettings,
|
settings: OSimpleSearch.StatsSettings,
|
||||||
fixQuery: Query.Fix,
|
fixQuery: Query.Fix,
|
||||||
itemQuery: ItemQueryString
|
itemQuery: ItemQueryString
|
||||||
) = {
|
): F[Response[F]] = {
|
||||||
import dsl._
|
import dsl._
|
||||||
|
|
||||||
backend.simpleSearch
|
backend.simpleSearch
|
||||||
@ -479,7 +483,6 @@ object ItemRoutes {
|
|||||||
case StringSearchResult.ParseFailed(pf) =>
|
case StringSearchResult.ParseFailed(pf) =>
|
||||||
BadRequest(BasicResult(false, s"Error reading query: ${pf.render}"))
|
BadRequest(BasicResult(false, s"Error reading query: ${pf.render}"))
|
||||||
}
|
}
|
||||||
|
|
||||||
}
|
}
|
||||||
|
|
||||||
implicit final class OptionString(opt: Option[String]) {
|
implicit final class OptionString(opt: Option[String]) {
|
||||||
|
@ -11,8 +11,7 @@ import cats.implicits._
|
|||||||
|
|
||||||
import docspell.backend.BackendApp
|
import docspell.backend.BackendApp
|
||||||
import docspell.backend.auth.AuthToken
|
import docspell.backend.auth.AuthToken
|
||||||
import docspell.backend.ops.OMail.{AttachSelection, ItemMail}
|
import docspell.backend.ops.OMail.{AttachSelection, ItemMail, SendResult}
|
||||||
import docspell.backend.ops.SendResult
|
|
||||||
import docspell.common._
|
import docspell.common._
|
||||||
import docspell.restapi.model._
|
import docspell.restapi.model._
|
||||||
|
|
||||||
|
@ -0,0 +1,64 @@
|
|||||||
|
/*
|
||||||
|
* Copyright 2020 Eike K. & Contributors
|
||||||
|
*
|
||||||
|
* SPDX-License-Identifier: AGPL-3.0-or-later
|
||||||
|
*/
|
||||||
|
|
||||||
|
package docspell.restserver.routes
|
||||||
|
|
||||||
|
import cats.effect._
|
||||||
|
import cats.implicits._
|
||||||
|
|
||||||
|
import docspell.backend.BackendApp
|
||||||
|
import docspell.backend.auth.ShareToken
|
||||||
|
import docspell.common._
|
||||||
|
import docspell.restserver.http4s.BinaryUtil
|
||||||
|
import docspell.restserver.webapp.Webjars
|
||||||
|
|
||||||
|
import org.http4s._
|
||||||
|
import org.http4s.dsl.Http4sDsl
|
||||||
|
import org.http4s.headers._
|
||||||
|
|
||||||
|
object ShareAttachmentRoutes {
|
||||||
|
|
||||||
|
def apply[F[_]: Async](
|
||||||
|
backend: BackendApp[F],
|
||||||
|
token: ShareToken
|
||||||
|
): HttpRoutes[F] = {
|
||||||
|
val dsl = new Http4sDsl[F] {}
|
||||||
|
import dsl._
|
||||||
|
|
||||||
|
HttpRoutes.of {
|
||||||
|
case HEAD -> Root / Ident(id) =>
|
||||||
|
for {
|
||||||
|
fileData <- backend.share.findAttachment(id, token.id).value
|
||||||
|
resp <- BinaryUtil.respondHead(dsl)(fileData)
|
||||||
|
} yield resp
|
||||||
|
|
||||||
|
case req @ GET -> Root / Ident(id) =>
|
||||||
|
for {
|
||||||
|
fileData <- backend.share.findAttachment(id, token.id).value
|
||||||
|
resp <- BinaryUtil.respond(dsl, req)(fileData)
|
||||||
|
} yield resp
|
||||||
|
|
||||||
|
case GET -> Root / Ident(id) / "view" =>
|
||||||
|
// this route exists to provide a stable url
|
||||||
|
// it redirects currently to viewerjs
|
||||||
|
val attachUrl = s"/api/v1/share/attachment/${id.id}"
|
||||||
|
val path = s"/app/assets${Webjars.viewerjs}/index.html#$attachUrl"
|
||||||
|
SeeOther(Location(Uri(path = Uri.Path.unsafeFromString(path))))
|
||||||
|
|
||||||
|
case req @ GET -> Root / Ident(id) / "preview" =>
|
||||||
|
for {
|
||||||
|
fileData <- backend.share.findAttachmentPreview(id, token.id).value
|
||||||
|
resp <- BinaryUtil.respondPreview(dsl, req)(fileData)
|
||||||
|
} yield resp
|
||||||
|
|
||||||
|
case HEAD -> Root / Ident(id) / "preview" =>
|
||||||
|
for {
|
||||||
|
fileData <- backend.share.findAttachmentPreview(id, token.id).value
|
||||||
|
resp <- BinaryUtil.respondPreviewHead(dsl)(fileData)
|
||||||
|
} yield resp
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
@ -0,0 +1,73 @@
|
|||||||
|
/*
|
||||||
|
* Copyright 2020 Eike K. & Contributors
|
||||||
|
*
|
||||||
|
* SPDX-License-Identifier: AGPL-3.0-or-later
|
||||||
|
*/
|
||||||
|
|
||||||
|
package docspell.restserver.routes
|
||||||
|
|
||||||
|
import cats.data.{Kleisli, OptionT}
|
||||||
|
import cats.effect._
|
||||||
|
import cats.implicits._
|
||||||
|
|
||||||
|
import docspell.backend.auth.{Login, ShareToken}
|
||||||
|
import docspell.backend.ops.OShare
|
||||||
|
import docspell.backend.ops.OShare.VerifyResult
|
||||||
|
import docspell.restserver.auth.ShareCookieData
|
||||||
|
|
||||||
|
import org.http4s._
|
||||||
|
import org.http4s.circe.CirceEntityEncoder._
|
||||||
|
import org.http4s.dsl.Http4sDsl
|
||||||
|
import org.http4s.server._
|
||||||
|
|
||||||
|
object ShareAuth {
|
||||||
|
|
||||||
|
def authenticateRequest[F[_]: Async](
|
||||||
|
validate: String => F[VerifyResult]
|
||||||
|
)(req: Request[F]): F[OShare.VerifyResult] =
|
||||||
|
ShareCookieData.fromRequest(req) match {
|
||||||
|
case Some(tokenStr) =>
|
||||||
|
validate(tokenStr)
|
||||||
|
case None =>
|
||||||
|
VerifyResult.notFound.pure[F]
|
||||||
|
}
|
||||||
|
|
||||||
|
private def getToken[F[_]: Async](
|
||||||
|
auth: String => F[VerifyResult]
|
||||||
|
): Kleisli[F, Request[F], Either[String, ShareToken]] =
|
||||||
|
Kleisli(r => authenticateRequest(auth)(r).map(_.toEither))
|
||||||
|
|
||||||
|
def of[F[_]: Async](S: OShare[F], cfg: Login.Config)(
|
||||||
|
pf: PartialFunction[AuthedRequest[F, ShareToken], F[Response[F]]]
|
||||||
|
): HttpRoutes[F] = {
|
||||||
|
val dsl: Http4sDsl[F] = new Http4sDsl[F] {}
|
||||||
|
import dsl._
|
||||||
|
|
||||||
|
val authUser = getToken[F](S.verifyToken(cfg.serverSecret))
|
||||||
|
|
||||||
|
val onFailure: AuthedRoutes[String, F] =
|
||||||
|
Kleisli(req => OptionT.liftF(Forbidden(req.context)))
|
||||||
|
|
||||||
|
val middleware: AuthMiddleware[F, ShareToken] =
|
||||||
|
AuthMiddleware(authUser, onFailure)
|
||||||
|
|
||||||
|
middleware(AuthedRoutes.of(pf))
|
||||||
|
}
|
||||||
|
|
||||||
|
def apply[F[_]: Async](S: OShare[F], cfg: Login.Config)(
|
||||||
|
f: ShareToken => HttpRoutes[F]
|
||||||
|
): HttpRoutes[F] = {
|
||||||
|
val dsl: Http4sDsl[F] = new Http4sDsl[F] {}
|
||||||
|
import dsl._
|
||||||
|
|
||||||
|
val authUser = getToken[F](S.verifyToken(cfg.serverSecret))
|
||||||
|
|
||||||
|
val onFailure: AuthedRoutes[String, F] =
|
||||||
|
Kleisli(req => OptionT.liftF(Forbidden(req.context)))
|
||||||
|
|
||||||
|
val middleware: AuthMiddleware[F, ShareToken] =
|
||||||
|
AuthMiddleware(authUser, onFailure)
|
||||||
|
|
||||||
|
middleware(AuthedRoutes(authReq => f(authReq.context).run(authReq.req)))
|
||||||
|
}
|
||||||
|
}
|
@ -0,0 +1,41 @@
|
|||||||
|
/*
|
||||||
|
* Copyright 2020 Eike K. & Contributors
|
||||||
|
*
|
||||||
|
* SPDX-License-Identifier: AGPL-3.0-or-later
|
||||||
|
*/
|
||||||
|
|
||||||
|
package docspell.restserver.routes
|
||||||
|
import cats.effect._
|
||||||
|
import cats.implicits._
|
||||||
|
|
||||||
|
import docspell.backend.BackendApp
|
||||||
|
import docspell.backend.auth.ShareToken
|
||||||
|
import docspell.common._
|
||||||
|
import docspell.restapi.model.BasicResult
|
||||||
|
import docspell.restserver.conv.Conversions
|
||||||
|
|
||||||
|
import org.http4s._
|
||||||
|
import org.http4s.circe.CirceEntityEncoder._
|
||||||
|
import org.http4s.dsl.Http4sDsl
|
||||||
|
|
||||||
|
object ShareItemRoutes {
|
||||||
|
|
||||||
|
def apply[F[_]: Async](
|
||||||
|
backend: BackendApp[F],
|
||||||
|
token: ShareToken
|
||||||
|
): HttpRoutes[F] = {
|
||||||
|
val dsl = new Http4sDsl[F] {}
|
||||||
|
import dsl._
|
||||||
|
|
||||||
|
HttpRoutes.of { case GET -> Root / Ident(id) =>
|
||||||
|
for {
|
||||||
|
item <- backend.share.findItem(id, token.id).value
|
||||||
|
result = item.map(Conversions.mkItemDetail)
|
||||||
|
resp <-
|
||||||
|
result
|
||||||
|
.map(r => Ok(r))
|
||||||
|
.getOrElse(NotFound(BasicResult(false, "Not found.")))
|
||||||
|
} yield resp
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
@ -0,0 +1,178 @@
|
|||||||
|
/*
|
||||||
|
* Copyright 2020 Eike K. & Contributors
|
||||||
|
*
|
||||||
|
* SPDX-License-Identifier: AGPL-3.0-or-later
|
||||||
|
*/
|
||||||
|
|
||||||
|
package docspell.restserver.routes
|
||||||
|
|
||||||
|
import cats.data.OptionT
|
||||||
|
import cats.effect._
|
||||||
|
import cats.implicits._
|
||||||
|
|
||||||
|
import docspell.backend.BackendApp
|
||||||
|
import docspell.backend.auth.AuthToken
|
||||||
|
import docspell.backend.ops.OShare
|
||||||
|
import docspell.backend.ops.OShare.{SendResult, ShareMail, VerifyResult}
|
||||||
|
import docspell.common.{Ident, Timestamp}
|
||||||
|
import docspell.restapi.model._
|
||||||
|
import docspell.restserver.Config
import docspell.restserver.auth.ShareCookieData
import docspell.restserver.http4s.{ClientRequestInfo, QueryParam => QP, ResponseGenerator}

import emil.MailAddress
import emil.javamail.syntax._
import org.http4s.HttpRoutes
import org.http4s.circe.CirceEntityDecoder._
import org.http4s.circe.CirceEntityEncoder._
import org.http4s.dsl.Http4sDsl

object ShareRoutes {

  def manage[F[_]: Async](backend: BackendApp[F], user: AuthToken): HttpRoutes[F] = {
    val dsl = new Http4sDsl[F] with ResponseGenerator[F] {}
    import dsl._

    HttpRoutes.of {
      case GET -> Root :? QP.Query(q) :? QP.OwningFlag(owning) =>
        val login = if (owning) Some(user.account.user) else None
        for {
          all <- backend.share.findAll(user.account.collective, login, q)
          now <- Timestamp.current[F]
          res <- Ok(ShareList(all.map(mkShareDetail(now))))
        } yield res

      case req @ POST -> Root =>
        for {
          data <- req.as[ShareData]
          share = mkNewShare(data, user)
          res <- backend.share.addNew(share)
          resp <- Ok(mkIdResult(res, "New share created."))
        } yield resp

      case GET -> Root / Ident(id) =>
        (for {
          share <- backend.share.findOne(id, user.account.collective)
          now <- OptionT.liftF(Timestamp.current[F])
          resp <- OptionT.liftF(Ok(mkShareDetail(now)(share)))
        } yield resp).getOrElseF(NotFound())

      case req @ PUT -> Root / Ident(id) =>
        for {
          data <- req.as[ShareData]
          share = mkNewShare(data, user)
          updated <- backend.share.update(id, share, data.removePassword.getOrElse(false))
          resp <- Ok(mkBasicResult(updated, "Share updated."))
        } yield resp

      case DELETE -> Root / Ident(id) =>
        for {
          del <- backend.share.delete(id, user.account.collective)
          resp <- Ok(BasicResult(del, if (del) "Share deleted." else "Deleting failed."))
        } yield resp

      case req @ POST -> Root / "email" / "send" / Ident(name) =>
        for {
          in <- req.as[SimpleShareMail]
          mail = convertIn(in)
          res <- mail.traverse(m => backend.share.sendMail(user.account, name, m))
          resp <- res.fold(
            err => Ok(BasicResult(false, s"Invalid mail data: $err")),
            res => Ok(convertOut(res))
          )
        } yield resp
    }
  }

  def verify[F[_]: Async](backend: BackendApp[F], cfg: Config): HttpRoutes[F] = {
    val dsl = new Http4sDsl[F] with ResponseGenerator[F] {}
    import dsl._

    HttpRoutes.of { case req @ POST -> Root / "verify" =>
      for {
        secret <- req.as[ShareSecret]
        res <- backend.share
          .verify(cfg.auth.serverSecret)(secret.shareId, secret.password)
        resp <- res match {
          case VerifyResult.Success(token, name) =>
            val cd = ShareCookieData(token)
            Ok(ShareVerifyResult(true, token.asString, false, "Success", name))
              .map(cd.addCookie(ClientRequestInfo.getBaseUrl(cfg, req)))
          case VerifyResult.PasswordMismatch =>
            Ok(ShareVerifyResult(false, "", true, "Failed", None))
          case VerifyResult.NotFound =>
            Ok(ShareVerifyResult(false, "", false, "Failed", None))
          case VerifyResult.InvalidToken =>
            Ok(ShareVerifyResult(false, "", false, "Failed", None))
        }
      } yield resp
    }
  }

  def mkNewShare(data: ShareData, user: AuthToken): OShare.NewShare =
    OShare.NewShare(
      user.account,
      data.name,
      data.query,
      data.enabled,
      data.password,
      data.publishUntil
    )

  def mkIdResult(r: OShare.ChangeResult, msg: => String): IdResult =
    r match {
      case OShare.ChangeResult.Success(id) => IdResult(true, msg, id)
      case OShare.ChangeResult.PublishUntilInPast =>
        IdResult(false, "Until date must not be in the past", Ident.unsafe(""))
      case OShare.ChangeResult.NotFound =>
        IdResult(
          false,
          "Share not found or not owner. Only the owner can update a share.",
          Ident.unsafe("")
        )
    }

  def mkBasicResult(r: OShare.ChangeResult, msg: => String): BasicResult =
    r match {
      case OShare.ChangeResult.Success(_) => BasicResult(true, msg)
      case OShare.ChangeResult.PublishUntilInPast =>
        BasicResult(false, "Until date must not be in the past")
      case OShare.ChangeResult.NotFound =>
        BasicResult(
          false,
          "Share not found or not owner. Only the owner can update a share."
        )
    }

  def mkShareDetail(now: Timestamp)(r: OShare.ShareData): ShareDetail =
    ShareDetail(
      r.share.id,
      r.share.query,
      IdName(r.user.uid, r.user.login.id),
      r.share.name,
      r.share.enabled,
      r.share.publishAt,
      r.share.publishUntil,
      now > r.share.publishUntil,
      r.share.password.isDefined,
      r.share.views,
      r.share.lastAccess
    )

  def convertIn(s: SimpleShareMail): Either[String, ShareMail] =
    for {
      rec <- s.recipients.traverse(MailAddress.parse)
      cc <- s.cc.traverse(MailAddress.parse)
      bcc <- s.bcc.traverse(MailAddress.parse)
    } yield ShareMail(s.shareId, s.subject, rec, cc, bcc, s.body)

  def convertOut(res: SendResult): BasicResult =
    res match {
      case SendResult.Success(_) =>
        BasicResult(true, "Mail sent.")
      case SendResult.SendFailure(ex) =>
        BasicResult(false, s"Mail sending failed: ${ex.getMessage}")
      case SendResult.NotFound =>
        BasicResult(false, s"There was no mail-connection or item found.")
    }
}
@ -0,0 +1,105 @@
/*
 * Copyright 2020 Eike K. & Contributors
 *
 * SPDX-License-Identifier: AGPL-3.0-or-later
 */

package docspell.restserver.routes

import cats.effect._
import cats.implicits._

import docspell.backend.BackendApp
import docspell.backend.auth.ShareToken
import docspell.backend.ops.OSimpleSearch
import docspell.backend.ops.OSimpleSearch.StringSearchResult
import docspell.common._
import docspell.query.FulltextExtract.Result.{TooMany, UnsupportedPosition}
import docspell.restapi.model._
import docspell.restserver.Config
import docspell.restserver.conv.Conversions
import docspell.store.qb.Batch
import docspell.store.queries.{Query, SearchSummary}

import org.http4s.circe.CirceEntityDecoder._
import org.http4s.circe.CirceEntityEncoder._
import org.http4s.dsl.Http4sDsl
import org.http4s.{HttpRoutes, Response}

object ShareSearchRoutes {

  def apply[F[_]: Async](
      backend: BackendApp[F],
      cfg: Config,
      token: ShareToken
  ): HttpRoutes[F] = {
    val logger = Logger.log4s[F](org.log4s.getLogger)

    val dsl = new Http4sDsl[F] {}
    import dsl._

    HttpRoutes.of {
      case req @ POST -> Root / "query" =>
        backend.share
          .findShareQuery(token.id)
          .semiflatMap { share =>
            for {
              userQuery <- req.as[ItemQuery]
              batch = Batch(
                userQuery.offset.getOrElse(0),
                userQuery.limit.getOrElse(cfg.maxItemPageSize)
              ).restrictLimitTo(
                cfg.maxItemPageSize
              )
              itemQuery = ItemQueryString(userQuery.query)
              settings = OSimpleSearch.Settings(
                batch,
                cfg.fullTextSearch.enabled,
                userQuery.withDetails.getOrElse(false),
                cfg.maxNoteLength,
                searchMode = SearchMode.Normal
              )
              account = share.account
              fixQuery = Query.Fix(account, Some(share.query.expr), None)
              _ <- logger.debug(s"Searching in share ${share.id.id}: ${userQuery.query}")
              resp <- ItemRoutes.searchItems(backend, dsl)(settings, fixQuery, itemQuery)
            } yield resp
          }
          .getOrElseF(NotFound())

      case req @ POST -> Root / "stats" =>
        for {
          userQuery <- req.as[ItemQuery]
          itemQuery = ItemQueryString(userQuery.query)
          settings = OSimpleSearch.StatsSettings(
            useFTS = cfg.fullTextSearch.enabled,
            searchMode = userQuery.searchMode.getOrElse(SearchMode.Normal)
          )
          stats <- backend.share.searchSummary(settings)(token.id, itemQuery).value
          resp <- stats.map(mkSummaryResponse(dsl)).getOrElse(NotFound())
        } yield resp
    }
  }

  def mkSummaryResponse[F[_]: Sync](
      dsl: Http4sDsl[F]
  )(r: StringSearchResult[SearchSummary]): F[Response[F]] = {
    import dsl._
    r match {
      case StringSearchResult.Success(summary) =>
        Ok(Conversions.mkSearchStats(summary))
      case StringSearchResult.FulltextMismatch(TooMany) =>
        BadRequest(BasicResult(false, "Fulltext search is not possible in this share."))
      case StringSearchResult.FulltextMismatch(UnsupportedPosition) =>
        BadRequest(
          BasicResult(
            false,
            "Fulltext search must be in root position or inside the first AND."
          )
        )
      case StringSearchResult.ParseFailed(pf) =>
        BadRequest(BasicResult(false, s"Error reading query: ${pf.render}"))
    }
  }

}
@ -72,7 +72,9 @@ object UserRoutes {
          data <- backend.collective.getDeleteUserData(
            AccountId(user.account.collective, username)
          )
          resp <- Ok(DeleteUserData(data.ownedFolders.map(_.id), data.sentMails))
          resp <- Ok(
            DeleteUserData(data.ownedFolders.map(_.id), data.sentMails, data.shares)
          )
        } yield resp
    }
  }
@ -43,8 +43,21 @@
        // this is required for transitioning; elm fails to parse the account
        account["requireSecondFactor"] = false;
    }

    // hack to guess if the browser can display PDFs natively. It
    // seems that almost all browsers allow to query the
    // navigator.mimeTypes array, except firefox.
    var ua = navigator.userAgent.toLowerCase();
    var pdfSupported = false;
    if (ua.indexOf("firefox") > -1) {
        pdfSupported = ua.indexOf("mobile") == -1;
    } else {
        pdfSupported = "application/pdf" in navigator.mimeTypes;
    }

    var elmFlags = {
        "account": account,
        "pdfSupported": pdfSupported,
        "config": {{{flagsJson}}}
    };
</script>
@ -0,0 +1,7 @@
CREATE TABLE "collective_password" (
  "id" varchar(254) not null primary key,
  "cid" varchar(254) not null,
  "pass" varchar(254) not null,
  "created" timestamp not null,
  foreign key ("cid") references "collective"("cid") on delete cascade
)
@ -0,0 +1,13 @@
CREATE TABLE "item_share" (
  "id" varchar(254) not null primary key,
  "user_id" varchar(254) not null,
  "name" varchar(254),
  "query" varchar(2000) not null,
  "enabled" boolean not null,
  "pass" varchar(254),
  "publish_at" timestamp not null,
  "publish_until" timestamp not null,
  "views" int not null,
  "last_access" timestamp,
  foreign key ("user_id") references "user_"("uid") on delete cascade
)
@ -0,0 +1,7 @@
CREATE TABLE `collective_password` (
  `id` varchar(254) not null primary key,
  `cid` varchar(254) not null,
  `pass` varchar(254) not null,
  `created` timestamp not null,
  foreign key (`cid`) references `collective`(`cid`) on delete cascade
)
@ -0,0 +1,13 @@
CREATE TABLE `item_share` (
  `id` varchar(254) not null primary key,
  `user_id` varchar(254) not null,
  `name` varchar(254),
  `query` varchar(2000) not null,
  `enabled` boolean not null,
  `pass` varchar(254),
  `publish_at` timestamp not null,
  `publish_until` timestamp not null,
  `views` int not null,
  `last_access` timestamp,
  foreign key (`user_id`) references `user_`(`uid`) on delete cascade
)
@ -0,0 +1,7 @@
CREATE TABLE "collective_password" (
  "id" varchar(254) not null primary key,
  "cid" varchar(254) not null,
  "pass" varchar(254) not null,
  "created" timestamp not null,
  foreign key ("cid") references "collective"("cid") on delete cascade
)
@ -0,0 +1,13 @@
CREATE TABLE "item_share" (
  "id" varchar(254) not null primary key,
  "user_id" varchar(254) not null,
  "name" varchar(254),
  "query" varchar(2000) not null,
  "enabled" boolean not null,
  "pass" varchar(254),
  "publish_at" timestamp not null,
  "publish_until" timestamp not null,
  "views" int not null,
  "last_access" timestamp,
  foreign key ("user_id") references "user_"("uid") on delete cascade
)
@ -9,6 +9,7 @@ package docspell.store
import scala.concurrent.ExecutionContext

import cats.effect._
import cats.~>
import fs2._

import docspell.store.file.FileStore
@ -19,6 +20,7 @@ import doobie._
import doobie.hikari.HikariTransactor

trait Store[F[_]] {
  def transform: ConnectionIO ~> F

  def transact[A](prg: ConnectionIO[A]): F[A]

@ -11,6 +11,7 @@ import java.time.{Instant, LocalDate}

import docspell.common._
import docspell.common.syntax.all._
import docspell.query.{ItemQuery, ItemQueryParser}
import docspell.totp.Key

import com.github.eikek.calev.CalEvent
@ -142,6 +143,11 @@ trait DoobieMeta extends EmilDoobieMeta {

  implicit val metaByteSize: Meta[ByteSize] =
    Meta[Long].timap(ByteSize.apply)(_.bytes)

  implicit val metaItemQuery: Meta[ItemQuery] =
    Meta[String].timap(s => ItemQueryParser.parseUnsafe(s))(q =>
      q.raw.getOrElse(ItemQueryParser.unsafeAsString(q.expr))
    )
}

object DoobieMeta extends DoobieMeta {
@ -6,8 +6,10 @@

package docspell.store.impl

import cats.arrow.FunctionK
import cats.effect.Async
import cats.implicits._
import cats.~>

import docspell.store.file.FileStore
import docspell.store.migrate.FlywayMigrate
@ -22,6 +24,9 @@ final class StoreImpl[F[_]: Async](
    xa: Transactor[F]
) extends Store[F] {

  def transform: ConnectionIO ~> F =
    FunctionK.lift(transact)

  def migrate: F[Int] =
    FlywayMigrate.run[F](jdbc).map(_.migrationsExecuted)

@ -0,0 +1,11 @@
/*
 * Copyright 2020 Eike K. & Contributors
 *
 * SPDX-License-Identifier: AGPL-3.0-or-later
 */

package docspell.store.queries

import docspell.common._

final case class IdRefCount(ref: IdRef, count: Int) {}
@ -192,7 +192,21 @@ object QItem {
      cats <- searchTagCategorySummary(today)(q)
      fields <- searchFieldSummary(today)(q)
      folders <- searchFolderSummary(today)(q)
    } yield SearchSummary(count, tags, cats, fields, folders)
      orgs <- searchCorrOrgSummary(today)(q)
      corrPers <- searchCorrPersonSummary(today)(q)
      concPers <- searchConcPersonSummary(today)(q)
      concEquip <- searchConcEquipSummary(today)(q)
    } yield SearchSummary(
      count,
      tags,
      cats,
      fields,
      folders,
      orgs,
      corrPers,
      concPers,
      concEquip
    )

  def searchTagCategorySummary(
      today: LocalDate
@ -251,6 +265,40 @@ object QItem {
      .query[Int]
      .unique

  def searchCorrOrgSummary(today: LocalDate)(q: Query): ConnectionIO[List[IdRefCount]] =
    searchIdRefSummary(org.oid, org.name, i.corrOrg, today)(q)

  def searchCorrPersonSummary(today: LocalDate)(
      q: Query
  ): ConnectionIO[List[IdRefCount]] =
    searchIdRefSummary(pers0.pid, pers0.name, i.corrPerson, today)(q)

  def searchConcPersonSummary(today: LocalDate)(
      q: Query
  ): ConnectionIO[List[IdRefCount]] =
    searchIdRefSummary(pers1.pid, pers1.name, i.concPerson, today)(q)

  def searchConcEquipSummary(today: LocalDate)(
      q: Query
  ): ConnectionIO[List[IdRefCount]] =
    searchIdRefSummary(equip.eid, equip.name, i.concEquipment, today)(q)

  private def searchIdRefSummary(
      idCol: Column[Ident],
      nameCol: Column[String],
      fkCol: Column[Ident],
      today: LocalDate
  )(q: Query): ConnectionIO[List[IdRefCount]] =
    findItemsBase(q.fix, today, 0).unwrap
      .withSelect(select(idCol, nameCol).append(count(idCol).as("num")))
      .changeWhere(c =>
        c && fkCol.isNotNull && queryCondition(today, q.fix.account.collective, q.cond)
      )
      .groupBy(idCol, nameCol)
      .build
      .query[IdRefCount]
      .to[List]

  def searchFolderSummary(today: LocalDate)(q: Query): ConnectionIO[List[FolderCount]] = {
    val fu = RUser.as("fu")
    findItemsBase(q.fix, today, 0).unwrap
@ -20,7 +20,8 @@ object QUser {

  final case class UserData(
      ownedFolders: List[Ident],
      sentMails: Int
      sentMails: Int,
      shares: Int
  )

  def getUserData(accountId: AccountId): ConnectionIO[UserData] = {
@ -28,6 +29,7 @@ object QUser {
    val mail = RSentMail.as("m")
    val mitem = RSentMailItem.as("mi")
    val user = RUser.as("u")
    val share = RShare.as("s")

    for {
      uid <- loadUserId(accountId).map(_.getOrElse(Ident.unsafe("")))
@ -43,7 +45,13 @@ object QUser {
          .innerJoin(user, user.uid === mail.uid),
        user.login === accountId.user && user.cid === accountId.collective
      ).query[Int].unique
    } yield UserData(folders, mails)
      shares <- run(
        select(count(share.id)),
        from(share)
          .innerJoin(user, user.uid === share.userId),
        user.login === accountId.user && user.cid === accountId.collective
      ).query[Int].unique
    } yield UserData(folders, mails, shares)
  }

  def deleteUserAndData(accountId: AccountId): ConnectionIO[Int] =
@ -11,5 +11,23 @@ case class SearchSummary(
    tags: List[TagCount],
    cats: List[CategoryCount],
    fields: List[FieldStats],
    folders: List[FolderCount]
    folders: List[FolderCount],
)
    corrOrgs: List[IdRefCount],
    corrPers: List[IdRefCount],
    concPers: List[IdRefCount],
    concEquip: List[IdRefCount]
) {

  def onlyExisting: SearchSummary =
    SearchSummary(
      count,
      tags.filter(_.count > 0),
      cats.filter(_.count > 0),
      fields.filter(_.count > 0),
      folders.filter(_.count > 0),
      corrOrgs = corrOrgs.filter(_.count > 0),
      corrPers = corrPers.filter(_.count > 0),
      concPers = concPers.filter(_.count > 0),
      concEquip = concEquip.filter(_.count > 0)
    )
}
@ -89,7 +89,8 @@ object RCollective {
        case None =>
          REmptyTrashSetting.delete(cid)
      }
    } yield n1 + n2 + n3
      n4 <- RCollectivePassword.replaceAll(cid, settings.passwords)
    } yield n1 + n2 + n3 + n4

  // this hides categories that have been deleted in the meantime
  // they are finally removed from the json array once the learn classifier task is run
@ -99,10 +100,12 @@ object RCollective {
      prev <- OptionT.fromOption[ConnectionIO](sett.classifier)
      cats <- OptionT.liftF(RTag.listCategories(coll))
      next = prev.copy(categories = prev.categories.intersect(cats))
    } yield sett.copy(classifier = Some(next))).value
      pws <- OptionT.liftF(RCollectivePassword.findAll(coll))
    } yield sett.copy(classifier = Some(next), passwords = pws.map(_.password))).value

  private def getRawSettings(coll: Ident): ConnectionIO[Option[Settings]] = {
    import RClassifierSetting.stringListMeta

    val c = RCollective.as("c")
    val cs = RClassifierSetting.as("cs")
    val es = REmptyTrashSetting.as("es")
@ -116,7 +119,8 @@ object RCollective {
        cs.categories.s,
        cs.listType.s,
        es.schedule.s,
        es.minAge.s
        es.minAge.s,
        const(0) //dummy value to load Nil as list of passwords
      ),
      from(c).leftJoin(cs, cs.cid === c.id).leftJoin(es, es.cid === c.id),
      c.id === coll
@ -170,7 +174,11 @@ object RCollective {
      language: Language,
      integrationEnabled: Boolean,
      classifier: Option[RClassifierSetting.Classifier],
      emptyTrash: Option[REmptyTrashSetting.EmptyTrash]
      emptyTrash: Option[REmptyTrashSetting.EmptyTrash],
      passwords: List[Password]
  )

  implicit val passwordListMeta: Read[List[Password]] =
    Read[Int].map(_ => Nil: List[Password])

  }
@ -0,0 +1,87 @@
/*
 * Copyright 2020 Eike K. & Contributors
 *
 * SPDX-License-Identifier: AGPL-3.0-or-later
 */

package docspell.store.records

import cats.data.NonEmptyList
import cats.effect._
import cats.implicits._

import docspell.common._
import docspell.store.qb.DSL._
import docspell.store.qb._

import doobie._
import doobie.implicits._

final case class RCollectivePassword(
    id: Ident,
    cid: Ident,
    password: Password,
    created: Timestamp
) {}

object RCollectivePassword {
  final case class Table(alias: Option[String]) extends TableDef {
    val tableName: String = "collective_password"

    val id = Column[Ident]("id", this)
    val cid = Column[Ident]("cid", this)
    val password = Column[Password]("pass", this)
    val created = Column[Timestamp]("created", this)

    val all: NonEmptyList[Column[_]] =
      NonEmptyList.of(id, cid, password, created)
  }

  val T = Table(None)
  def as(alias: String): Table =
    Table(Some(alias))

  def createNew[F[_]: Sync](cid: Ident, pw: Password): F[RCollectivePassword] =
    for {
      id <- Ident.randomId[F]
      time <- Timestamp.current[F]
    } yield RCollectivePassword(id, cid, pw, time)

  def insert(v: RCollectivePassword): ConnectionIO[Int] =
    DML.insert(
      T,
      T.all,
      fr"${v.id}, ${v.cid},${v.password},${v.created}"
    )

  def upsert(v: RCollectivePassword): ConnectionIO[Int] =
    for {
      k <- deleteByPassword(v.cid, v.password)
      n <- insert(v)
    } yield n + k

  def deleteById(id: Ident): ConnectionIO[Int] =
    DML.delete(T, T.id === id)

  def deleteByPassword(cid: Ident, pw: Password): ConnectionIO[Int] =
    DML.delete(T, T.password === pw && T.cid === cid)

  def findAll(cid: Ident): ConnectionIO[List[RCollectivePassword]] =
    Select(select(T.all), from(T), T.cid === cid).build
      .query[RCollectivePassword]
      .to[List]

  def replaceAll(cid: Ident, pws: List[Password]): ConnectionIO[Int] =
    for {
      k <- DML.delete(T, T.cid === cid)
      pw <- pws.traverse(p => createNew[ConnectionIO](cid, p))
      n <-
        if (pws.isEmpty) 0.pure[ConnectionIO]
        else
          DML.insertMany(
            T,
            T.all,
            pw.map(p => fr"${p.id},${p.cid},${p.password},${p.created}")
          )
    } yield k + n
}
167
modules/store/src/main/scala/docspell/store/records/RShare.scala
Normal file
@ -0,0 +1,167 @@
/*
 * Copyright 2020 Eike K. & Contributors
 *
 * SPDX-License-Identifier: AGPL-3.0-or-later
 */

package docspell.store.records

import cats.data.{NonEmptyList, OptionT}

import docspell.common._
import docspell.query.ItemQuery
import docspell.store.qb.DSL._
import docspell.store.qb._

import doobie._
import doobie.implicits._

final case class RShare(
    id: Ident,
    userId: Ident,
    name: Option[String],
    query: ItemQuery,
    enabled: Boolean,
    password: Option[Password],
    publishAt: Timestamp,
    publishUntil: Timestamp,
    views: Int,
    lastAccess: Option[Timestamp]
) {}

object RShare {

  final case class Table(alias: Option[String]) extends TableDef {
    val tableName = "item_share";

    val id = Column[Ident]("id", this)
    val userId = Column[Ident]("user_id", this)
    val name = Column[String]("name", this)
    val query = Column[ItemQuery]("query", this)
    val enabled = Column[Boolean]("enabled", this)
    val password = Column[Password]("pass", this)
    val publishedAt = Column[Timestamp]("publish_at", this)
    val publishedUntil = Column[Timestamp]("publish_until", this)
    val views = Column[Int]("views", this)
    val lastAccess = Column[Timestamp]("last_access", this)

    val all: NonEmptyList[Column[_]] =
      NonEmptyList.of(
        id,
        userId,
        name,
        query,
        enabled,
        password,
        publishedAt,
        publishedUntil,
        views,
        lastAccess
      )
  }

  val T: Table = Table(None)
  def as(alias: String): Table = Table(Some(alias))

  def insert(r: RShare): ConnectionIO[Int] =
    DML.insert(
      T,
      T.all,
      fr"${r.id},${r.userId},${r.name},${r.query},${r.enabled},${r.password},${r.publishAt},${r.publishUntil},${r.views},${r.lastAccess}"
    )

  def incAccess(id: Ident): ConnectionIO[Int] =
    for {
      curTime <- Timestamp.current[ConnectionIO]
      n <- DML.update(
        T,
        T.id === id,
        DML.set(T.views.increment(1), T.lastAccess.setTo(curTime))
      )
    } yield n

  def updateData(r: RShare, removePassword: Boolean): ConnectionIO[Int] =
    DML.update(
      T,
      T.id === r.id && T.userId === r.userId,
      DML.set(
        T.name.setTo(r.name),
        T.query.setTo(r.query),
        T.enabled.setTo(r.enabled),
        T.publishedUntil.setTo(r.publishUntil)
      ) ++ (if (r.password.isDefined || removePassword)
              List(T.password.setTo(r.password))
            else Nil)
    )

  def findOne(id: Ident, cid: Ident): OptionT[ConnectionIO, (RShare, RUser)] = {
    val s = RShare.as("s")
    val u = RUser.as("u")

    OptionT(
      Select(
        select(s.all, u.all),
        from(s).innerJoin(u, u.uid === s.userId),
        s.id === id && u.cid === cid
      ).build
        .query[(RShare, RUser)]
        .option
    )
  }

  private def activeCondition(t: Table, id: Ident, current: Timestamp): Condition =
    t.id === id && t.enabled === true && t.publishedUntil > current

  def findActive(
      id: Ident,
      current: Timestamp
  ): OptionT[ConnectionIO, (RShare, RUser)] = {
    val s = RShare.as("s")
    val u = RUser.as("u")

    OptionT(
      Select(
        select(s.all, u.all),
        from(s).innerJoin(u, s.userId === u.uid),
        activeCondition(s, id, current)
      ).build.query[(RShare, RUser)].option
    )
  }

  def findCurrentActive(id: Ident): OptionT[ConnectionIO, (RShare, RUser)] =
    OptionT.liftF(Timestamp.current[ConnectionIO]).flatMap(now => findActive(id, now))

  def findActivePassword(id: Ident): OptionT[ConnectionIO, Option[Password]] =
    OptionT(Timestamp.current[ConnectionIO].flatMap { now =>
      Select(select(T.password), from(T), activeCondition(T, id, now)).build
        .query[Option[Password]]
        .option
    })

  def findAllByCollective(
      cid: Ident,
      ownerLogin: Option[Ident],
      q: Option[String]
  ): ConnectionIO[List[(RShare, RUser)]] = {
    val s = RShare.as("s")
    val u = RUser.as("u")

    val ownerQ = ownerLogin.map(name => u.login === name)
    val nameQ = q.map(n => s.name.like(s"%$n%"))

    Select(
      select(s.all, u.all),
      from(s).innerJoin(u, u.uid === s.userId),
      u.cid === cid &&? ownerQ &&? nameQ
    )
      .orderBy(s.publishedAt.desc)
      .build
      .query[(RShare, RUser)]
      .to[List]
  }

  def deleteByIdAndCid(id: Ident, cid: Ident): ConnectionIO[Int] = {
    val u = RUser.T
    DML.delete(T, T.id === id && T.userId.in(Select(u.uid.s, from(u), u.cid === cid)))
  }
}
@ -26,7 +26,13 @@ case class RUser(
    loginCount: Int,
    lastLogin: Option[Timestamp],
    created: Timestamp
) {}
) {
  def accountId: AccountId =
    AccountId(cid, login)

  def idRef: IdRef =
    IdRef(uid, login.id)
}

object RUser {

@ -16,12 +16,13 @@
        "elm/html": "1.0.0",
        "elm/http": "2.0.0",
        "elm/json": "1.1.3",
        "elm/svg": "1.0.1",
        "elm/time": "1.0.0",
        "elm/url": "1.0.0",
        "elm-explorations/markdown": "1.0.0",
        "justinmimbs/date": "3.1.2",
        "norpan/elm-html5-drag-drop": "3.1.4",
        "pablohirafuji/elm-qrcode": "3.3.1",
        "pablohirafuji/elm-qrcode": "4.0.1",
        "ryannhg/date-format": "2.3.0",
        "truqu/elm-base64": "2.0.4",
        "ursi/elm-scroll": "1.0.0",
@ -33,7 +34,6 @@
        "elm/bytes": "1.0.8",
        "elm/parser": "1.1.0",
        "elm/regex": "1.0.0",
        "elm/svg": "1.0.1",
        "elm/virtual-dom": "1.0.2",
        "elm-community/list-extra": "8.2.4",
        "folkertdev/elm-flate": "2.0.4",
1974
modules/webapp/package-lock.json
generated
File diff suppressed because it is too large
@ -2,15 +2,17 @@
  "name": "docspell-css",
  "version": "1.0.0",
  "private": true,
  "dependencies": {
  "dependencies": {},
    "@fortawesome/fontawesome-free": "^5.15.3",
  "devDependencies": {
    "@tailwindcss/forms": "^0.3.0",
    "@fortawesome/fontawesome-free": "^5.15.4",
    "autoprefixer": "^10.2.5",
    "@tailwindcss/forms": "^0.3.4",
    "cssnano": "^5.0.0",
    "flag-icon-css": "^3.5.0",
    "postcss": "^8.2.9",
    "postcss-cli": "^9.0.1",
    "postcss-cli": "^8.3.1",
    "postcss-import": "^14.0.2",
    "postcss-import": "^14.0.1",
    "autoprefixer": "^10.3.7",
    "tailwindcss": "^2.1.1"
    "cssnano": "^5.0.8",
    "postcss": "^8.3.11",
    "postcss-purgecss": "^2.0.3",
    "tailwindcss": "^2.2.17"
  }
}
@ -13,7 +13,7 @@ const prodPlugins =
        require('postcss-import'),
        tailwindcss("./tailwind.config.js"),
        require("autoprefixer"),
        require("@fullhuman/postcss-purgecss")({
        require("postcss-purgecss")({
            content: [
                "./src/main/elm/**/*.elm",
                "./src/main/styles/keep.txt",
@ -11,6 +11,7 @@ module Api exposing
    , addCorrOrg
    , addCorrPerson
    , addMember
    , addShare
    , addTag
    , addTagsMultiple
    , attachmentPreviewURL
@ -40,6 +41,7 @@ module Api exposing
    , deleteOrg
    , deletePerson
    , deleteScanMailbox
    , deleteShare
    , deleteSource
    , deleteTag
    , deleteUser
@ -72,6 +74,8 @@ module Api exposing
    , getPersonsLight
    , getScanMailbox
    , getSentMails
    , getShare
    , getShares
    , getSources
    , getTagCloud
    , getTags
@ -79,6 +83,7 @@ module Api exposing
    , initOtp
    , itemBasePreviewURL
    , itemDetail
    , itemDetailShare
    , itemIndexSearch
    , itemSearch
    , itemSearchStats
@ -109,6 +114,8 @@ module Api exposing
    , restoreAllItems
    , restoreItem
    , saveClientSettings
    , searchShare
    , searchShareStats
    , sendMail
    , setAttachmentName
    , setCollectiveSettings
@ -136,6 +143,10 @@ module Api exposing
    , setTags
    , setTagsMultiple
    , setUnconfirmed
    , shareAttachmentPreviewURL
    , shareFileURL
    , shareItemBasePreviewURL
    , shareSendMail
    , startClassifier
    , startEmptyTrash
    , startOnceNotifyDueItems
@ -147,9 +158,11 @@ module Api exposing
    , unconfirmMultiple
    , updateNotifyDueItems
    , updateScanMailbox
    , updateShare
    , upload
    , uploadAmend
    , uploadSingle
    , verifyShare
    , versionInfo
    )

@ -215,7 +228,13 @@ import Api.Model.ScanMailboxSettingsList exposing (ScanMailboxSettingsList)
import Api.Model.SearchStats exposing (SearchStats)
import Api.Model.SecondFactor exposing (SecondFactor)
import Api.Model.SentMails exposing (SentMails)
import Api.Model.ShareData exposing (ShareData)
import Api.Model.ShareDetail exposing (ShareDetail)
import Api.Model.ShareList exposing (ShareList)
import Api.Model.ShareSecret exposing (ShareSecret)
import Api.Model.ShareVerifyResult exposing (ShareVerifyResult)
import Api.Model.SimpleMail exposing (SimpleMail)
import Api.Model.SimpleShareMail exposing (SimpleShareMail)
import Api.Model.SourceAndTags exposing (SourceAndTags)
import Api.Model.SourceList exposing (SourceList)
import Api.Model.SourceTagIn
@ -2206,6 +2225,134 @@ disableOtp flags otp receive =



--- Share


getShares : Flags -> String -> Bool -> (Result Http.Error ShareList -> msg) -> Cmd msg
getShares flags query owning receive =
    Http2.authGet
        { url =
            flags.config.baseUrl
                ++ "/api/v1/sec/share?q="
                ++ Url.percentEncode query
                ++ (if owning then
                        "&owning"

                    else
                        ""
                   )
        , account = getAccount flags
        , expect = Http.expectJson receive Api.Model.ShareList.decoder
        }


getShare : Flags -> String -> (Result Http.Error ShareDetail -> msg) -> Cmd msg
getShare flags id receive =
    Http2.authGet
        { url = flags.config.baseUrl ++ "/api/v1/sec/share/" ++ id
        , account = getAccount flags
        , expect = Http.expectJson receive Api.Model.ShareDetail.decoder
        }


addShare : Flags -> ShareData -> (Result Http.Error IdResult -> msg) -> Cmd msg
addShare flags share receive =
    Http2.authPost
        { url = flags.config.baseUrl ++ "/api/v1/sec/share"
        , account = getAccount flags
        , body = Http.jsonBody (Api.Model.ShareData.encode share)
        , expect = Http.expectJson receive Api.Model.IdResult.decoder
        }


updateShare : Flags -> String -> ShareData -> (Result Http.Error BasicResult -> msg) -> Cmd msg
updateShare flags id share receive =
    Http2.authPut
        { url = flags.config.baseUrl ++ "/api/v1/sec/share/" ++ id
        , account = getAccount flags
        , body = Http.jsonBody (Api.Model.ShareData.encode share)
        , expect = Http.expectJson receive Api.Model.BasicResult.decoder
        }


deleteShare : Flags -> String -> (Result Http.Error BasicResult -> msg) -> Cmd msg
deleteShare flags id receive =
    Http2.authDelete
        { url = flags.config.baseUrl ++ "/api/v1/sec/share/" ++ id
        , account = getAccount flags
        , expect = Http.expectJson receive Api.Model.BasicResult.decoder
        }


verifyShare : Flags -> ShareSecret -> (Result Http.Error ShareVerifyResult -> msg) -> Cmd msg
verifyShare flags secret receive =
    Http2.authPost
        { url = flags.config.baseUrl ++ "/api/v1/open/share/verify"
        , account = getAccount flags
        , body = Http.jsonBody (Api.Model.ShareSecret.encode secret)
        , expect = Http.expectJson receive Api.Model.ShareVerifyResult.decoder
        }


searchShare : Flags -> String -> ItemQuery -> (Result Http.Error ItemLightList -> msg) -> Cmd msg
searchShare flags token search receive =
    Http2.sharePost
        { url = flags.config.baseUrl ++ "/api/v1/share/search/query"
        , token = token
        , body = Http.jsonBody (Api.Model.ItemQuery.encode search)
        , expect = Http.expectJson receive Api.Model.ItemLightList.decoder
        }


searchShareStats : Flags -> String -> ItemQuery -> (Result Http.Error SearchStats -> msg) -> Cmd msg
searchShareStats flags token search receive =
    Http2.sharePost
        { url = flags.config.baseUrl ++ "/api/v1/share/search/stats"
        , token = token
        , body = Http.jsonBody (Api.Model.ItemQuery.encode search)
        , expect = Http.expectJson receive Api.Model.SearchStats.decoder
        }


itemDetailShare : Flags -> String -> String -> (Result Http.Error ItemDetail -> msg) -> Cmd msg
itemDetailShare flags token itemId receive =
    Http2.shareGet
        { url = flags.config.baseUrl ++ "/api/v1/share/item/" ++ itemId
        , token = token
        , expect = Http.expectJson receive Api.Model.ItemDetail.decoder
        }


shareSendMail :
    Flags
    -> { conn : String, mail : SimpleShareMail }
    -> (Result Http.Error BasicResult -> msg)
    -> Cmd msg
shareSendMail flags opts receive =
    Http2.authPost
        { url = flags.config.baseUrl ++ "/api/v1/sec/share/email/send/" ++ opts.conn
        , account = getAccount flags
        , body = Http.jsonBody (Api.Model.SimpleShareMail.encode opts.mail)
        , expect = Http.expectJson receive Api.Model.BasicResult.decoder
        }


shareAttachmentPreviewURL : String -> String
shareAttachmentPreviewURL id =
    "/api/v1/share/attachment/" ++ id ++ "/preview?withFallback=true"


shareItemBasePreviewURL : String -> String
shareItemBasePreviewURL itemId =
    "/api/v1/share/item/" ++ itemId ++ "/preview?withFallback=true"


shareFileURL : String -> String
shareFileURL attachId =
    "/api/v1/share/attachment/" ++ attachId


--- Helper

@ -32,6 +32,8 @@ import Page.ManageData.Data
import Page.NewInvite.Data
import Page.Queue.Data
import Page.Register.Data
import Page.Share.Data
import Page.ShareDetail.Data
import Page.Upload.Data
import Page.UserSettings.Data
import Url exposing (Url)
@ -52,6 +54,8 @@ type alias Model =
    , uploadModel : Page.Upload.Data.Model
    , newInviteModel : Page.NewInvite.Data.Model
    , itemDetailModel : Page.ItemDetail.Data.Model
    , shareModel : Page.Share.Data.Model
    , shareDetailModel : Page.ShareDetail.Data.Model
    , navMenuOpen : Bool
    , userMenuOpen : Bool
    , subs : Sub Msg
@ -85,6 +89,12 @@ init key url flags_ settings =
        ( loginm, loginc ) =
            Page.Login.Data.init flags (Page.loginPageReferrer page)

        ( shm, shc ) =
            Page.Share.Data.init (Page.pageShareId page) flags

        ( sdm, sdc ) =
            Page.ShareDetail.Data.init (Page.pageShareDetail page) flags

        homeViewMode =
            if settings.searchMenuVisible then
                Page.Home.Data.SearchView
@ -106,6 +116,8 @@ init key url flags_ settings =
      , uploadModel = Page.Upload.Data.emptyModel
      , newInviteModel = Page.NewInvite.Data.emptyModel
      , itemDetailModel = Page.ItemDetail.Data.emptyModel
      , shareModel = shm
      , shareDetailModel = sdm
      , navMenuOpen = False
      , userMenuOpen = False
      , subs = Sub.none
@ -120,6 +132,8 @@ init key url flags_ settings =
        , Cmd.map ManageDataMsg mdc
        , Cmd.map CollSettingsMsg csc
        , Cmd.map LoginMsg loginc
        , Cmd.map ShareMsg shc
        , Cmd.map ShareDetailMsg sdc
        ]
    )

@ -162,6 +176,8 @@ type Msg
    | UploadMsg Page.Upload.Data.Msg
    | NewInviteMsg Page.NewInvite.Data.Msg
    | ItemDetailMsg Page.ItemDetail.Data.Msg
    | ShareMsg Page.Share.Data.Msg
    | ShareDetailMsg Page.ShareDetail.Data.Msg
    | Logout
    | LogoutResp (Result Http.Error ())
    | SessionCheckResp (Result Http.Error AuthResult)
@ -17,6 +17,7 @@ import Browser.Navigation as Nav
|
|||||||
import Data.Flags
|
import Data.Flags
|
||||||
import Data.UiSettings exposing (UiSettings)
|
import Data.UiSettings exposing (UiSettings)
|
||||||
import Data.UiTheme
|
import Data.UiTheme
|
||||||
|
import Messages exposing (Messages)
|
||||||
import Page exposing (Page(..))
|
import Page exposing (Page(..))
|
||||||
import Page.CollectiveSettings.Data
|
import Page.CollectiveSettings.Data
|
||||||
import Page.CollectiveSettings.Update
|
import Page.CollectiveSettings.Update
|
||||||
@ -34,6 +35,10 @@ import Page.Queue.Data
|
|||||||
import Page.Queue.Update
|
import Page.Queue.Update
|
||||||
import Page.Register.Data
|
import Page.Register.Data
|
||||||
import Page.Register.Update
|
import Page.Register.Update
|
||||||
|
import Page.Share.Data
|
||||||
|
import Page.Share.Update
|
||||||
|
import Page.ShareDetail.Data
|
||||||
|
import Page.ShareDetail.Update
|
||||||
import Page.Upload.Data
|
import Page.Upload.Data
|
||||||
import Page.Upload.Update
|
import Page.Upload.Update
|
||||||
import Page.UserSettings.Data
|
import Page.UserSettings.Data
|
||||||
@ -55,6 +60,10 @@ update msg model =
|
|||||||
|
|
||||||
updateWithSub : Msg -> Model -> ( Model, Cmd Msg, Sub Msg )
|
updateWithSub : Msg -> Model -> ( Model, Cmd Msg, Sub Msg )
|
||||||
updateWithSub msg model =
|
updateWithSub msg model =
|
||||||
|
let
|
||||||
|
texts =
|
||||||
|
Messages.get <| App.Data.getUiLanguage model
|
||||||
|
in
|
||||||
case msg of
|
case msg of
|
||||||
ToggleSidebar ->
|
ToggleSidebar ->
|
||||||
( { model | sidebarVisible = not model.sidebarVisible }, Cmd.none, Sub.none )
|
( { model | sidebarVisible = not model.sidebarVisible }, Cmd.none, Sub.none )
|
||||||
@ -94,7 +103,7 @@ updateWithSub msg model =
|
|||||||
|
|
||||||
ClientSettingsSaveResp settings (Ok res) ->
|
ClientSettingsSaveResp settings (Ok res) ->
|
||||||
if res.success then
|
if res.success then
|
||||||
applyClientSettings model settings
|
applyClientSettings texts model settings
|
||||||
|
|
||||||
else
|
else
|
||||||
( model, Cmd.none, Sub.none )
|
( model, Cmd.none, Sub.none )
|
||||||
@ -112,7 +121,13 @@ updateWithSub msg model =
|
|||||||
( { model | anonymousUiLang = lang, langMenuOpen = False }, Cmd.none, Sub.none )
|
( { model | anonymousUiLang = lang, langMenuOpen = False }, Cmd.none, Sub.none )
|
||||||
|
|
||||||
HomeMsg lm ->
|
HomeMsg lm ->
|
||||||
updateHome lm model
|
updateHome texts lm model
|
||||||
|
|
||||||
|
ShareMsg lm ->
|
||||||
|
updateShare lm model
|
||||||
|
|
||||||
|
ShareDetailMsg lm ->
|
||||||
|
updateShareDetail lm model
|
||||||
|
|
||||||
LoginMsg lm ->
|
LoginMsg lm ->
|
||||||
updateLogin lm model
|
updateLogin lm model
|
||||||
@ -121,10 +136,10 @@ updateWithSub msg model =
|
|||||||
updateManageData lm model
|
updateManageData lm model
|
||||||
|
|
||||||
CollSettingsMsg m ->
|
CollSettingsMsg m ->
|
||||||
updateCollSettings m model
|
updateCollSettings texts m model
|
||||||
|
|
||||||
UserSettingsMsg m ->
|
UserSettingsMsg m ->
|
||||||
updateUserSettings m model
|
updateUserSettings texts m model
|
||||||
|
|
||||||
QueueMsg m ->
|
QueueMsg m ->
|
||||||
updateQueue m model
|
updateQueue m model
|
||||||
@ -139,7 +154,7 @@ updateWithSub msg model =
|
|||||||
updateNewInvite m model
|
updateNewInvite m model
|
||||||
|
|
||||||
ItemDetailMsg m ->
|
ItemDetailMsg m ->
|
||||||
updateItemDetail m model
|
updateItemDetail texts m model
|
||||||
|
|
||||||
VersionResp (Ok info) ->
|
VersionResp (Ok info) ->
|
||||||
( { model | version = info }, Cmd.none, Sub.none )
|
( { model | version = info }, Cmd.none, Sub.none )
|
||||||
@ -281,7 +296,7 @@ updateWithSub msg model =
|
|||||||
)
|
)
|
||||||
|
|
||||||
GetUiSettings (Ok settings) ->
|
GetUiSettings (Ok settings) ->
|
||||||
applyClientSettings model settings
|
applyClientSettings texts model settings
|
||||||
|
|
||||||
GetUiSettings (Err _) ->
|
GetUiSettings (Err _) ->
|
||||||
( model, Cmd.none, Sub.none )
|
( model, Cmd.none, Sub.none )
|
||||||
@ -291,11 +306,11 @@ updateWithSub msg model =
|
|||||||
lm =
|
lm =
|
||||||
Page.UserSettings.Data.ReceiveBrowserSettings sett
|
Page.UserSettings.Data.ReceiveBrowserSettings sett
|
||||||
in
|
in
|
||||||
updateUserSettings lm model
|
updateUserSettings texts lm model
|
||||||
|
|
||||||
|
|
||||||
applyClientSettings : Model -> UiSettings -> ( Model, Cmd Msg, Sub Msg )
|
applyClientSettings : Messages -> Model -> UiSettings -> ( Model, Cmd Msg, Sub Msg )
|
||||||
applyClientSettings model settings =
|
applyClientSettings texts model settings =
|
||||||
let
|
let
|
||||||
setTheme =
|
setTheme =
|
||||||
Ports.setUiTheme settings.uiTheme
|
Ports.setUiTheme settings.uiTheme
|
||||||
@ -306,15 +321,49 @@ applyClientSettings model settings =
|
|||||||
, setTheme
|
, setTheme
|
||||||
, Sub.none
|
, Sub.none
|
||||||
)
|
)
|
||||||
, updateUserSettings Page.UserSettings.Data.UpdateSettings
|
, updateUserSettings texts Page.UserSettings.Data.UpdateSettings
|
||||||
, updateHome Page.Home.Data.UiSettingsUpdated
|
, updateHome texts Page.Home.Data.UiSettingsUpdated
|
||||||
, updateItemDetail Page.ItemDetail.Data.UiSettingsUpdated
|
, updateItemDetail texts Page.ItemDetail.Data.UiSettingsUpdated
|
||||||
]
|
]
|
||||||
{ model | uiSettings = settings }
|
{ model | uiSettings = settings }
|
||||||
|
|
||||||
|
|
||||||
updateItemDetail : Page.ItemDetail.Data.Msg -> Model -> ( Model, Cmd Msg, Sub Msg )
|
updateShareDetail : Page.ShareDetail.Data.Msg -> Model -> ( Model, Cmd Msg, Sub Msg )
|
||||||
updateItemDetail lmsg model =
|
updateShareDetail lmsg model =
|
||||||
|
case Page.pageShareDetail model.page of
|
||||||
|
Just ( shareId, itemId ) ->
|
||||||
|
let
|
||||||
|
( m, c ) =
|
||||||
|
Page.ShareDetail.Update.update shareId itemId model.flags lmsg model.shareDetailModel
|
||||||
|
in
|
||||||
|
( { model | shareDetailModel = m }
|
||||||
|
, Cmd.map ShareDetailMsg c
|
||||||
|
, Sub.none
|
||||||
|
)
|
||||||
|
|
||||||
|
Nothing ->
|
||||||
|
( model, Cmd.none, Sub.none )
|
||||||
|
|
||||||
|
|
||||||
|
updateShare : Page.Share.Data.Msg -> Model -> ( Model, Cmd Msg, Sub Msg )
|
||||||
|
updateShare lmsg model =
|
||||||
|
case Page.pageShareId model.page of
|
||||||
|
Just id ->
|
||||||
|
let
|
||||||
|
result =
|
||||||
|
Page.Share.Update.update model.flags model.uiSettings id lmsg model.shareModel
|
||||||
|
in
|
||||||
|
( { model | shareModel = result.model }
|
||||||
|
, Cmd.map ShareMsg result.cmd
|
||||||
|
, Sub.map ShareMsg result.sub
|
||||||
|
)
|
||||||
|
|
||||||
|
Nothing ->
|
||||||
|
( model, Cmd.none, Sub.none )
|
||||||
|
|
||||||
|
|
||||||
|
updateItemDetail : Messages -> Page.ItemDetail.Data.Msg -> Model -> ( Model, Cmd Msg, Sub Msg )
|
||||||
|
updateItemDetail texts lmsg model =
|
||||||
let
|
let
|
||||||
inav =
|
inav =
|
||||||
Page.Home.Data.itemNav model.itemDetailModel.detail.item.id model.homeModel
|
Page.Home.Data.itemNav model.itemDetailModel.detail.item.id model.homeModel
|
||||||
@ -334,12 +383,12 @@ updateItemDetail lmsg model =
|
|||||||
}
|
}
|
||||||
|
|
||||||
( hm, hc, hs ) =
|
( hm, hc, hs ) =
|
||||||
updateHome (Page.Home.Data.SetLinkTarget result.linkTarget) model_
|
updateHome texts (Page.Home.Data.SetLinkTarget result.linkTarget) model_
|
||||||
|
|
||||||
( hm1, hc1, hs1 ) =
|
( hm1, hc1, hs1 ) =
|
||||||
case result.removedItem of
|
case result.removedItem of
|
||||||
Just removedId ->
|
Just removedId ->
|
||||||
updateHome (Page.Home.Data.RemoveItem removedId) hm
|
updateHome texts (Page.Home.Data.RemoveItem removedId) hm
|
||||||
|
|
||||||
Nothing ->
|
Nothing ->
|
||||||
( hm, hc, hs )
|
( hm, hc, hs )
|
||||||
@ -402,8 +451,8 @@ updateQueue lmsg model =
|
|||||||
)
|
)
|
||||||
|
|
||||||
|
|
||||||
updateUserSettings : Page.UserSettings.Data.Msg -> Model -> ( Model, Cmd Msg, Sub Msg )
|
updateUserSettings : Messages -> Page.UserSettings.Data.Msg -> Model -> ( Model, Cmd Msg, Sub Msg )
|
||||||
updateUserSettings lmsg model =
|
updateUserSettings texts lmsg model =
|
||||||
let
|
let
|
||||||
result =
|
result =
|
||||||
Page.UserSettings.Update.update model.flags model.uiSettings lmsg model.userSettingsModel
|
Page.UserSettings.Update.update model.flags model.uiSettings lmsg model.userSettingsModel
|
||||||
@ -414,7 +463,7 @@ updateUserSettings lmsg model =
|
|||||||
( lm2, lc2, s2 ) =
|
( lm2, lc2, s2 ) =
|
||||||
case result.newSettings of
|
case result.newSettings of
|
||||||
Just sett ->
|
Just sett ->
|
||||||
applyClientSettings model_ sett
|
applyClientSettings texts model_ sett
|
||||||
|
|
||||||
Nothing ->
|
Nothing ->
|
||||||
( model_, Cmd.none, Sub.none )
|
( model_, Cmd.none, Sub.none )
|
||||||
@@ -431,17 +480,18 @@ updateUserSettings lmsg model =
     )


-updateCollSettings : Page.CollectiveSettings.Data.Msg -> Model -> ( Model, Cmd Msg, Sub Msg )
-updateCollSettings lmsg model =
+updateCollSettings : Messages -> Page.CollectiveSettings.Data.Msg -> Model -> ( Model, Cmd Msg, Sub Msg )
+updateCollSettings texts lmsg model =
     let
-        ( lm, lc ) =
-            Page.CollectiveSettings.Update.update model.flags
+        ( lm, lc, ls ) =
+            Page.CollectiveSettings.Update.update texts.collectiveSettings
+                model.flags
                 lmsg
                 model.collSettingsModel
     in
     ( { model | collSettingsModel = lm }
     , Cmd.map CollSettingsMsg lc
-    , Sub.none
+    , Sub.map CollSettingsMsg ls
     )

@@ -464,8 +514,8 @@ updateLogin lmsg model =
     )


-updateHome : Page.Home.Data.Msg -> Model -> ( Model, Cmd Msg, Sub Msg )
-updateHome lmsg model =
+updateHome : Messages -> Page.Home.Data.Msg -> Model -> ( Model, Cmd Msg, Sub Msg )
+updateHome texts lmsg model =
     let
         mid =
             case model.page of
@@ -476,7 +526,7 @@ updateHome lmsg model =
                     Nothing

         result =
-            Page.Home.Update.update mid model.key model.flags model.uiSettings lmsg model.homeModel
+            Page.Home.Update.update mid model.key model.flags texts.home model.uiSettings lmsg model.homeModel

         model_ =
             { model | homeModel = result.model }
@@ -484,7 +534,7 @@ updateHome lmsg model =
         ( lm, lc, ls ) =
             case result.newSettings of
                 Just sett ->
-                    applyClientSettings model_ sett
+                    applyClientSettings texts model_ sett

                 Nothing ->
                     ( model_, Cmd.none, Sub.none )
@@ -518,11 +568,14 @@ initPage model_ page =
     let
         model =
             { model_ | page = page }
+
+        texts =
+            Messages.get <| App.Data.getUiLanguage model
     in
     case page of
         HomePage ->
             Util.Update.andThen2
-                [ updateHome Page.Home.Data.Init
+                [ updateHome texts Page.Home.Data.Init
                 , updateQueue Page.Queue.Data.StopRefresh
                 ]
                 model
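Aside: the hunks above all follow one pattern, resolving the translated texts once from the current UI language and handing the relevant slice to each page's update function. A minimal sketch of that lookup, using only names visible in this diff (the helper `currentTexts` itself is hypothetical, not part of the commit):

    -- Sketch only: mirrors the lookup initPage performs above.
    import App.Data exposing (Model)
    import Messages exposing (Messages)


    currentTexts : Model -> Messages
    currentTexts model =
        -- derive the Messages record from the user's UI language
        Messages.get <| App.Data.getUiLanguage model

Each page then receives its own field of that record, e.g. `texts.home` for the home page or `texts.collectiveSettings` for the collective settings page.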
@@ -536,7 +589,7 @@ initPage model_ page =
         CollectiveSettingPage ->
             Util.Update.andThen2
                 [ updateQueue Page.Queue.Data.StopRefresh
-                , updateCollSettings Page.CollectiveSettings.Data.Init
+                , updateCollSettings texts Page.CollectiveSettings.Data.Init
                 ]
                 model

@@ -564,7 +617,33 @@ initPage model_ page =

         ItemDetailPage id ->
             Util.Update.andThen2
-                [ updateItemDetail (Page.ItemDetail.Data.Init id)
+                [ updateItemDetail texts (Page.ItemDetail.Data.Init id)
                 , updateQueue Page.Queue.Data.StopRefresh
                 ]
                 model
+
+        SharePage id ->
+            let
+                cmd =
+                    Cmd.map ShareMsg (Page.Share.Data.initCmd id model.flags)
+
+                shareModel =
+                    model.shareModel
+            in
+            if shareModel.initialized then
+                ( model, Cmd.none, Sub.none )
+
+            else
+                ( { model | shareModel = { shareModel | initialized = True } }, cmd, Sub.none )
+
+        ShareDetailPage _ _ ->
+            case model_.page of
+                SharePage _ ->
+                    let
+                        verifyResult =
+                            model.shareModel.verifyResult
+                    in
+                    updateShareDetail (Page.ShareDetail.Data.VerifyResp (Ok verifyResult)) model
+
+                _ ->
+                    ( model, Cmd.none, Sub.none )
@@ -27,6 +27,8 @@ import Page.ManageData.View2 as ManageData
 import Page.NewInvite.View2 as NewInvite
 import Page.Queue.View2 as Queue
 import Page.Register.View2 as Register
+import Page.Share.View as Share
+import Page.ShareDetail.View as ShareDetail
 import Page.Upload.View2 as Upload
 import Page.UserSettings.View2 as UserSettings
 import Styles as S
@@ -41,14 +43,10 @@ view model =

 topNavbar : Model -> Html Msg
 topNavbar model =
-    case model.flags.account of
+    case Data.Flags.getAccount model.flags of
         Just acc ->
-            if acc.success then
-                topNavUser acc model
-
-            else
-                topNavAnon model
+            topNavUser acc model

         Nothing ->
             topNavAnon model

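The hunk above replaces the inline `acc.success` check with `Data.Flags.getAccount`, so that helper presumably only yields an account whose auth result was successful. A sketch of that assumed behaviour (not shown in this diff; the `AuthResult` type name is taken from the surrounding code base):

    -- Assumption: getAccount filters out unsuccessful auth results.
    getAccount : Flags -> Maybe AuthResult
    getAccount flags =
        flags.account
            |> Maybe.andThen
                (\acc ->
                    if acc.success then
                        Just acc

                    else
                        Nothing
                )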
@@ -72,7 +70,7 @@ topNavUser auth model =
             , baseStyle = "font-bold inline-flex items-center px-4 py-2"
             , activeStyle = "hover:bg-blue-200 dark:hover:bg-bluegray-800 w-12"
             }
-        , headerNavItem model
+        , headerNavItem True model
         , div [ class "flex flex-grow justify-end" ]
             [ userMenu texts.app auth model
             , dataMenu texts.app auth model
@@ -86,7 +84,16 @@ topNavAnon model =
         [ id "top-nav"
         , class styleTopNav
         ]
-        [ headerNavItem model
+        [ B.genericButton
+            { label = ""
+            , icon = "fa fa-bars"
+            , handler = onClick ToggleSidebar
+            , disabled = not (Page.hasSidebar model.page)
+            , attrs = [ href "#" ]
+            , baseStyle = "font-bold inline-flex items-center px-4 py-2"
+            , activeStyle = "hover:bg-blue-200 dark:hover:bg-bluegray-800 w-12"
+            }
+        , headerNavItem False model
         , div [ class "flex flex-grow justify-end" ]
             [ langMenu model
             , a
@@ -100,11 +107,24 @@ topNavAnon model =
         ]


-headerNavItem : Model -> Html Msg
-headerNavItem model =
-    a
-        [ class "inline-flex font-bold hover:bg-blue-200 dark:hover:bg-bluegray-800 items-center px-4"
-        , Page.href HomePage
+headerNavItem : Bool -> Model -> Html Msg
+headerNavItem authenticated model =
+    let
+        tag =
+            if authenticated then
+                a
+
+            else
+                div
+    in
+    tag
+        [ class "inline-flex font-bold items-center px-4"
+        , classList [ ( "hover:bg-blue-200 dark:hover:bg-bluegray-800", authenticated ) ]
+        , if authenticated then
+            Page.href HomePage
+
+          else
+            href "#"
         ]
         [ img
             [ src (model.flags.config.docspellAssetPath ++ "/img/logo-96.png")
@@ -157,6 +177,12 @@ mainContent model =

             ItemDetailPage id ->
                 viewItemDetail texts id model
+
+            SharePage id ->
+                viewShare texts id model
+
+            ShareDetailPage shareId itemId ->
+                viewShareDetail texts shareId itemId model
         )

@@ -411,6 +437,49 @@ dropdownMenu =
     " absolute right-0 bg-white dark:bg-bluegray-800 border dark:border-bluegray-700 dark:text-bluegray-300 shadow-lg opacity-1 transition duration-200 min-w-max "


+viewShare : Messages -> String -> Model -> List (Html Msg)
+viewShare texts shareId model =
+    [ Html.map ShareMsg
+        (Share.viewSidebar texts.share
+            model.sidebarVisible
+            model.flags
+            model.uiSettings
+            model.shareModel
+        )
+    , Html.map ShareMsg
+        (Share.viewContent texts.share
+            model.flags
+            model.version
+            model.uiSettings
+            shareId
+            model.shareModel
+        )
+    ]
+
+
+viewShareDetail : Messages -> String -> String -> Model -> List (Html Msg)
+viewShareDetail texts shareId itemId model =
+    [ Html.map ShareDetailMsg
+        (ShareDetail.viewSidebar texts.shareDetail
+            model.sidebarVisible
+            model.flags
+            model.uiSettings
+            shareId
+            itemId
+            model.shareDetailModel
+        )
+    , Html.map ShareDetailMsg
+        (ShareDetail.viewContent texts.shareDetail
+            model.flags
+            model.uiSettings
+            model.version
+            shareId
+            itemId
+            model.shareDetailModel
+        )
+    ]
+
+
 viewHome : Messages -> Model -> List (Html Msg)
 viewHome texts model =
     [ Html.map HomeMsg