Mirror of https://github.com/TheAnachronism/docspell.git
Synced 2025-10-03 09:56:58 +00:00

Compare commits: nightly...e3faff7acf (5 commits)
Commits in this compare:
e3faff7acf
ad61dae136
53160d34bf
66dad6a300
8b0dfaedc1
@@ -1,2 +0,0 @@
-# Scala Steward: Reformat with scalafmt 3.8.2
-1c566cd5182d41f4cc06040fc347ddb4be617779
.github/release-drafter.yml (vendored): deleted file, 42 lines
@@ -1,42 +0,0 @@
name-template: "$RESOLVED_VERSION"
tag-template: "$RESOLVED_VERSION"
template: |
  ## What’s Changed

  $CHANGES

categories:
  - title: "🚀 Features"
    labels:
      - 'feature'
      - 'enhancement'
  - title: "🐛 Bug Fixes"
    labels:
      - 'fix'
      - 'bug'
  - title: "💚 Maintenance"
    labels:
      - 'chore'
      - 'documentation'
  - title: "🧱 Dependencies"
    labels:
      - 'dependencies'
      - 'type: dependencies'

change-template: '- $TITLE @$AUTHOR (#$NUMBER)'
version-resolver:
  major:
    labels:
      - 'breaking'
  minor:
    labels:
      - 'feature'
      - 'enhancement'
  patch:
    labels:
      - 'chore'
      - 'documentation'
      - 'dependencies'
  default: patch
exclude-labels:
  - 'skip-changelog'
.github/renovate.json (vendored): 2 changed lines
@@ -1,6 +1,6 @@
 {
   "automerge": true,
-  "labels": ["dependencies"],
+  "labels": ["type: dependencies"],
   "packageRules": [
     {
       "matchManagers": [
.github/stale.yml (vendored): new file, 16 lines
@@ -0,0 +1,16 @@
# Number of days of inactivity before an issue becomes stale
daysUntilStale: 30
# Number of days of inactivity before a stale issue is closed
daysUntilClose: 7
onlyLabels:
  - question
# Label to use when marking an issue as stale
staleLabel: stale
# Comment to post when marking an issue as stale. Set to `false` to disable
markComment: >
  This issue has been automatically marked as stale because it has not
  had recent activity. It will be closed if no further activity
  occurs. This only applies to 'question' issues. Always feel free to
  reopen or create new issues. Thank you!
# Comment to post when closing a stale issue. Set to `false` to disable
closeComment: false
.github/workflows/ci-docs.yml (vendored): 15 changed lines
@@ -6,13 +6,20 @@ on:
       - "master"
 jobs:
   check-website:
-    runs-on: ubuntu-latest
+    runs-on: ubuntu-22.04
     steps:
-      - uses: actions/checkout@v4.1.7
+      - uses: actions/checkout@v4.1.1
         with:
           fetch-depth: 0
-      - uses: cachix/install-nix-action@v27
       - name: Set current version
         run: echo "DOCSPELL_VERSION=$(cat version.sbt | grep version | cut -d= -f2 | xargs)" >> $GITHUB_ENV
+      - uses: jorelali/setup-elm@v5
+        with:
+          elm-version: 0.19.1
+      - uses: cachix/install-nix-action@v25
+        with:
+          nix_path: nixpkgs=channel:nixos-23.05
+      - name: Print nixpkgs version
+        run: nix-instantiate --eval -E '(import <nixpkgs> {}).lib.version'
       - name: Build website (${{ env.DOCSPELL_VERSION }})
-        run: nix develop .#ci --command sbt make-website
+        run: nix-shell website/shell.nix --run "sbt make-website"
.github/workflows/ci.yml (vendored): 20 changed lines
@@ -5,18 +5,30 @@ on:
       - master
 jobs:
   ci-matrix:
-    runs-on: ubuntu-latest
+    runs-on: ubuntu-22.04
     strategy:
       fail-fast: false
+      matrix:
+        java: [ 'openjdk@1.17' ]
     steps:
-      - uses: actions/checkout@v4.1.7
+      - uses: actions/checkout@v4.1.1
         with:
           fetch-depth: 100
+      - uses: jorelali/setup-elm@v5
+        with:
+          elm-version: 0.19.1
+      - uses: bahmutov/npm-install@v1
+        with:
+          working-directory: modules/webapp
       - name: Fetch tags
         run: git fetch --depth=100 origin +refs/tags/*:refs/tags/*
-      - uses: cachix/install-nix-action@v27
+      - uses: olafurpg/setup-scala@v14
+        with:
+          java-version: ${{ matrix.java }}
+      # - name: Coursier cache
+      #   uses: coursier/cache-action@v6
       - name: sbt ci ${{ github.ref }}
-        run: nix develop .#ci --command sbt ci
+        run: sbt ci
   ci:
     runs-on: ubuntu-22.04
     needs: [ci-matrix]
.github/workflows/docker-image.yml (vendored): 4 changed lines
@@ -4,9 +4,9 @@ on:
     types: [ published ]
 jobs:
   docker-images:
-    runs-on: ubuntu-latest
+    runs-on: ubuntu-22.04
     steps:
-      - uses: actions/checkout@v4.1.7
+      - uses: actions/checkout@v4.1.1
         with:
           fetch-depth: 0
       - name: Set current version
.github/workflows/release-drafter.yml (vendored): deleted file, 14 lines
@@ -1,14 +0,0 @@
name: Release Drafter

on:
  push:
    branches:
      - master

jobs:
  update_release_draft:
    runs-on: ubuntu-latest
    steps:
      - uses: release-drafter/release-drafter@v6
        env:
          GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
.github/workflows/release-nightly.yml (vendored): 22 changed lines
@@ -5,20 +5,32 @@ on:
       - "master"
 jobs:
   release-nightly:
-    runs-on: ubuntu-latest
+    runs-on: ubuntu-22.04
     strategy:
       fail-fast: true
+      matrix:
+        java: [ 'openjdk@1.17' ]
     steps:
-      - uses: actions/checkout@v4.1.7
+      - uses: actions/checkout@v4.1.1
         with:
           fetch-depth: 0
-      - uses: cachix/install-nix-action@v27
+      - uses: olafurpg/setup-scala@v14
+        with:
+          java-version: ${{ matrix.java }}
+      - uses: jorelali/setup-elm@v5
+        with:
+          elm-version: 0.19.1
+      - uses: bahmutov/npm-install@v1
+        with:
+          working-directory: modules/webapp
+      # - name: Coursier cache
+      #   uses: coursier/cache-action@v6
       - name: Set current version
         run: echo "DOCSPELL_VERSION=$(cat version.sbt | grep version | cut -d= -f2 | xargs)" >> $GITHUB_ENV
       - name: sbt ci ${{ github.ref }}
-        run: nix develop .#ci --command sbt ci
+        run: sbt ci
       - name: sbt make-pkg (${{ env.DOCSPELL_VERSION }})
-        run: nix develop .#ci --command sbt make-pkg
+        run: sbt make-pkg
       - uses: "marvinpinto/action-automatic-releases@latest"
         with:
           repo_token: "${{ secrets.GITHUB_TOKEN }}"
.github/workflows/release.yml (vendored): 20 changed lines
@@ -5,18 +5,30 @@ on:
       - 'v*'
 jobs:
   release:
-    runs-on: ubuntu-latest
+    runs-on: ubuntu-22.04
     strategy:
       fail-fast: true
+      matrix:
+        java: [ 'openjdk@1.17' ]
     steps:
-      - uses: actions/checkout@v4.1.7
+      - uses: actions/checkout@v4.1.1
         with:
           fetch-depth: 0
-      - uses: cachix/install-nix-action@v27
+      - uses: olafurpg/setup-scala@v14
+        with:
+          java-version: ${{ matrix.java }}
+      - uses: jorelali/setup-elm@v5
+        with:
+          elm-version: 0.19.1
+      - uses: bahmutov/npm-install@v1
+        with:
+          working-directory: modules/webapp
+      # - name: Coursier cache
+      #   uses: coursier/cache-action@v6
       - name: Set current version
         run: echo "DOCSPELL_VERSION=$(cat version.sbt | grep version | cut -d= -f2 | xargs)" >> $GITHUB_ENV
       - name: sbt make-pkg (${{ env.DOCSPELL_VERSION }})
-        run: nix develop .#ci --command sbt make-pkg
+        run: sbt make-pkg
       - uses: meeDamian/github-release@2.0
         with:
           token: ${{ secrets.GITHUB_TOKEN }}
.github/workflows/stale.yml (vendored): deleted file, 21 lines
@@ -1,21 +0,0 @@
name: 'Handle stale issues'
on:
  schedule:
    - cron: '30 1 * * *'

jobs:
  stale:
    runs-on: ubuntu-latest
    steps:
      # https://github.com/actions/stale
      - uses: actions/stale@v9
        with:
          days-before-stale: 30
          days-before-close: 7
          only-labels: question
          stale-issue-label: stale
          stale-issue-message: >
            This issue has been automatically marked as stale because it has not
            had recent activity. It will be closed if no further activity
            occurs. This only applies to 'question' issues. Always feel free to
            reopen or create new issues. Thank you!
.github/workflows/website.yml (vendored): 17 changed lines
@@ -5,17 +5,24 @@ on:
       - "current-docs"
 jobs:
   publish-website:
-    runs-on: ubuntu-latest
+    runs-on: ubuntu-22.04
     steps:
-      - uses: actions/checkout@v4.1.7
+      - uses: actions/checkout@v4.1.1
         with:
           fetch-depth: 0
-      - uses: cachix/install-nix-action@v27
       - name: Set current version
         run: echo "DOCSPELL_VERSION=$(cat version.sbt | grep version | cut -d= -f2 | xargs)" >> $GITHUB_ENV
+      - uses: jorelali/setup-elm@v5
+        with:
+          elm-version: 0.19.1
+      - uses: cachix/install-nix-action@v25
+        with:
+          nix_path: nixpkgs=channel:nixos-23.05
+      - name: Print nixpkgs version
+        run: nix-instantiate --eval -E '(import <nixpkgs> {}).lib.version'
       - name: Build website (${{ env.DOCSPELL_VERSION }})
-        run: nix develop .#ci --command sbt make-website
+        run: nix-shell website/shell.nix --run "sbt make-website"
       - name: Publish website (${{ env.DOCSPELL_VERSION }})
         env:
           GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
-        run: nix develop .#ci --command sbt publish-website
+        run: sbt publish-website
.gitignore (vendored): 1 changed line
@@ -1,5 +1,4 @@
 #artwork/*.png
-.envrc
 target/
 local/
 node_modules/
@@ -6,7 +6,7 @@ pull_request_rules:
     assign:
       users: [eikek]
     label:
-      add: ["dependencies"]
+      add: ["type: dependencies"]
   - name: automatically merge Scala Steward PRs on CI success
     conditions:
       - author=eikek-scala-steward[bot]
@@ -1,4 +1,4 @@
-version = "3.8.2"
+version = "3.7.17"
 
 preset = default
 align.preset = some
@@ -1020,7 +1020,7 @@ Additionally there are some other minor features and bug fixes.
   to be able to add a request header. Check [this for
   firefox](https://addons.mozilla.org/en-US/firefox/addon/modheader-firefox/)
   or [this for
-  chromium](https://chromewebstore.google.com/detail/modheader-modify-http-hea/idgpnmonknjnojddfkpgkljpfnnfcklj)
+  chromium](https://chrome.google.com/webstore/detail/modheader/idgpnmonknjnojddfkpgkljpfnnfcklj)
   - then add the request header `Docspell-Ui` with value `1`.
   Reloading the page gets you back the old ui.
 - With new Web-UI, certain features and fixes were realized, but not
build.sbt: 23 changed lines
@@ -15,14 +15,11 @@ val scalafixSettings = Seq(
 
 val sharedSettings = Seq(
   organization := "com.github.eikek",
-  scalaVersion := "2.13.14",
+  scalaVersion := "2.13.12",
   organizationName := "Eike K. & Contributors",
-  licenses += (
-    "AGPL-3.0-or-later",
-    url(
-      "https://spdx.org/licenses/AGPL-3.0-or-later.html"
-    )
-  ),
+  licenses += ("AGPL-3.0-or-later", url(
+    "https://spdx.org/licenses/AGPL-3.0-or-later.html"
+  )),
   startYear := Some(2020),
   headerLicenseStyle := HeaderLicenseStyle.SpdxSyntax,
   headerSources / excludeFilter := HiddenFileFilter || "*.java" || "StringUtil.scala",
@@ -680,11 +677,7 @@ val restapi = project
     openapiTargetLanguage := Language.Scala,
     openapiPackage := Pkg("docspell.restapi.model"),
     openapiSpec := (Compile / resourceDirectory).value / "docspell-openapi.yml",
-    openapiStaticGen := OpenApiDocGenerator.Redoc,
-    openapiRedoclyCmd := Seq("redocly-cli"),
-    openapiRedoclyConfig := Some(
-      (LocalRootProject / baseDirectory).value / "project" / "redocly.yml"
-    )
+    openapiStaticGen := OpenApiDocGenerator.Redoc
   )
   .dependsOn(common, query.jvm, notificationApi, jsonminiq, addonlib)
 
@@ -704,11 +697,7 @@ val joexapi = project
     openapiTargetLanguage := Language.Scala,
     openapiPackage := Pkg("docspell.joexapi.model"),
     openapiSpec := (Compile / resourceDirectory).value / "joex-openapi.yml",
-    openapiStaticGen := OpenApiDocGenerator.Redoc,
-    openapiRedoclyCmd := Seq("redocly-cli"),
-    openapiRedoclyConfig := Some(
-      (LocalRootProject / baseDirectory).value / "project" / "redocly.yml"
-    )
+    openapiStaticGen := OpenApiDocGenerator.Redoc
   )
   .dependsOn(common, loggingScribe, addonlib)
 
@@ -109,7 +109,7 @@ services:
       - restserver
 
   db:
-    image: postgres:16.3
+    image: postgres:16.1
     container_name: postgres_db
     restart: unless-stopped
     volumes:
@@ -1,4 +1,4 @@
-FROM alpine:3.20.2
+FROM alpine:20231219
 
 ARG version=
 ARG joex_url=
@@ -77,7 +77,7 @@ RUN \
     wget https://github.com/tesseract-ocr/tessdata/raw/main/khm.traineddata && \
     mv khm.traineddata /usr/share/tessdata
 
-# Using these data files for japanese, because they work better. Includes vertical data. See #973 and #2445.
+# Using these data files for japanese, because they work better. See #973
 RUN \
     wget https://raw.githubusercontent.com/tesseract-ocr/tessdata_fast/master/jpn_vert.traineddata && \
     wget https://raw.githubusercontent.com/tesseract-ocr/tessdata_fast/master/jpn.traineddata && \
@@ -1,4 +1,4 @@
-FROM alpine:3.20.2
+FROM alpine:20231219
 
 ARG version=
 ARG restserver_url=
flake.lock (generated): deleted file, 130 lines
@@ -1,130 +0,0 @@
{
  "nodes": {
    "devshell-tools": {
      "inputs": {
        "flake-utils": "flake-utils",
        "nixpkgs": "nixpkgs"
      },
      "locked": {
        "lastModified": 1710099997,
        "narHash": "sha256-WmBKTLdth6I/D+0//9enbIXohGsBjepbjIAm9pCYj0U=",
        "owner": "eikek",
        "repo": "devshell-tools",
        "rev": "e82faf976d318b3829f6f7f6785db6f3c7b65267",
        "type": "github"
      },
      "original": {
        "owner": "eikek",
        "repo": "devshell-tools",
        "type": "github"
      }
    },
    "flake-utils": {
      "inputs": {
        "systems": "systems"
      },
      "locked": {
        "lastModified": 1709126324,
        "narHash": "sha256-q6EQdSeUZOG26WelxqkmR7kArjgWCdw5sfJVHPH/7j8=",
        "owner": "numtide",
        "repo": "flake-utils",
        "rev": "d465f4819400de7c8d874d50b982301f28a84605",
        "type": "github"
      },
      "original": {
        "owner": "numtide",
        "repo": "flake-utils",
        "type": "github"
      }
    },
    "flake-utils_2": {
      "inputs": {
        "systems": "systems_2"
      },
      "locked": {
        "lastModified": 1709126324,
        "narHash": "sha256-q6EQdSeUZOG26WelxqkmR7kArjgWCdw5sfJVHPH/7j8=",
        "owner": "numtide",
        "repo": "flake-utils",
        "rev": "d465f4819400de7c8d874d50b982301f28a84605",
        "type": "github"
      },
      "original": {
        "owner": "numtide",
        "repo": "flake-utils",
        "type": "github"
      }
    },
    "nixpkgs": {
      "locked": {
        "lastModified": 1709309926,
        "narHash": "sha256-VZFBtXGVD9LWTecGi6eXrE0hJ/mVB3zGUlHImUs2Qak=",
        "owner": "NixOS",
        "repo": "nixpkgs",
        "rev": "79baff8812a0d68e24a836df0a364c678089e2c7",
        "type": "github"
      },
      "original": {
        "owner": "NixOS",
        "ref": "nixos-23.11",
        "repo": "nixpkgs",
        "type": "github"
      }
    },
    "nixpkgs_2": {
      "locked": {
        "lastModified": 1709677081,
        "narHash": "sha256-tix36Y7u0rkn6mTm0lA45b45oab2cFLqAzDbJxeXS+c=",
        "owner": "NixOS",
        "repo": "nixpkgs",
        "rev": "880992dcc006a5e00dd0591446fdf723e6a51a64",
        "type": "github"
      },
      "original": {
        "owner": "NixOS",
        "ref": "nixos-23.11",
        "repo": "nixpkgs",
        "type": "github"
      }
    },
    "root": {
      "inputs": {
        "devshell-tools": "devshell-tools",
        "flake-utils": "flake-utils_2",
        "nixpkgs": "nixpkgs_2"
      }
    },
    "systems": {
      "locked": {
        "lastModified": 1681028828,
        "narHash": "sha256-Vy1rq5AaRuLzOxct8nz4T6wlgyUR7zLU309k9mBC768=",
        "owner": "nix-systems",
        "repo": "default",
        "rev": "da67096a3b9bf56a91d16901293e51ba5b49a27e",
        "type": "github"
      },
      "original": {
        "owner": "nix-systems",
        "repo": "default",
        "type": "github"
      }
    },
    "systems_2": {
      "locked": {
        "lastModified": 1681028828,
        "narHash": "sha256-Vy1rq5AaRuLzOxct8nz4T6wlgyUR7zLU309k9mBC768=",
        "owner": "nix-systems",
        "repo": "default",
        "rev": "da67096a3b9bf56a91d16901293e51ba5b49a27e",
        "type": "github"
      },
      "original": {
        "owner": "nix-systems",
        "repo": "default",
        "type": "github"
      }
    }
  },
  "root": "root",
  "version": 7
}
flake.nix: deleted file, 193 lines
@@ -1,193 +0,0 @@
{
  description = "Docspell";

  inputs = {
    nixpkgs.url = "github:NixOS/nixpkgs/nixos-23.11";
    devshell-tools.url = "github:eikek/devshell-tools";
    flake-utils.url = "github:numtide/flake-utils";
  };

  outputs = {
    self,
    nixpkgs,
    flake-utils,
    devshell-tools,
  }:
    flake-utils.lib.eachDefaultSystem (system: let
      pkgs = nixpkgs.legacyPackages.${system};
      sbt17 = pkgs.sbt.override {jre = pkgs.jdk17;};
      ciPkgs = with pkgs; [
        sbt17
        jdk17
        dpkg
        elmPackages.elm
        fakeroot
        zola
        yarn
        nodejs
        redocly-cli
        tailwindcss
      ];
      devshellPkgs =
        ciPkgs
        ++ (with pkgs; [
          jq
          scala-cli
          netcat
          wget
          which
          inotifyTools
        ]);
      docspellPkgs = pkgs.callPackage (import ./nix/pkg.nix) {};
      dockerAmd64 = pkgs.pkgsCross.gnu64.callPackage (import ./nix/docker.nix) {
        inherit (docspellPkgs) docspell-restserver docspell-joex;
      };
      dockerArm64 = pkgs.pkgsCross.aarch64-multiplatform.callPackage (import ./nix/docker.nix) {
        inherit (docspellPkgs) docspell-restserver docspell-joex;
      };
    in {
      formatter = pkgs.alejandra;

      packages = {
        inherit (docspellPkgs) docspell-restserver docspell-joex;
      };

      legacyPackages = {
        docker = {
          amd64 = {
            inherit (dockerAmd64) docspell-restserver docspell-joex;
          };
          arm64 = {
            inherit (dockerArm64) docspell-restserver docspell-joex;
          };
        };
      };

      checks = {
        build-server = self.packages.${system}.docspell-restserver;
        build-joex = self.packages.${system}.docspell-joex;

        test = with import (nixpkgs + "/nixos/lib/testing-python.nix")
        {
          inherit system;
        };
          makeTest {
            name = "docspell";
            nodes = {
              machine = {...}: {
                nixpkgs.overlays = [self.overlays.default];
                imports = [
                  self.nixosModules.default
                  ./nix/checks
                ];
              };
            };

            testScript = builtins.readFile ./nix/checks/testScript.py;
          };
      };

      devShells = {
        dev-cnt = pkgs.mkShellNoCC {
          buildInputs =
            (builtins.attrValues devshell-tools.legacyPackages.${system}.cnt-scripts)
            ++ devshellPkgs;

          DOCSPELL_ENV = "dev";
          DEV_CONTAINER = "docsp-dev";
          SBT_OPTS = "-Xmx2G -Xss4m";
        };
        dev-vm = pkgs.mkShellNoCC {
          buildInputs =
            (builtins.attrValues devshell-tools.legacyPackages.${system}.vm-scripts)
            ++ devshellPkgs;

          DOCSPELL_ENV = "dev";
          SBT_OPTS = "-Xmx2G -Xss4m";
          DEV_VM = "dev-vm";
          VM_SSH_PORT = "10022";
        };
        ci = pkgs.mkShellNoCC {
          buildInputs = ciPkgs;
          SBT_OPTS = "-Xmx2G -Xss4m";
        };
      };
    })
    // {
      nixosModules = {
        default = {...}: {
          imports = [
            ./nix/modules/server.nix
            ./nix/modules/joex.nix
          ];
        };
        server = import ./nix/modules/server.nix;
        joex = import ./nix/modules/joex.nix;
      };

      overlays.default = final: prev: let
        docspellPkgs = final.callPackage (import ./nix/pkg.nix) {};
      in {
        inherit (docspellPkgs) docspell-restserver docspell-joex;
      };

      nixosConfigurations = {
        test-vm = devshell-tools.lib.mkVm {
          system = "x86_64-linux";
          modules = [
            self.nixosModules.default
            {
              nixpkgs.overlays = [self.overlays.default];
            }
            ./nix/test-vm.nix
          ];
        };
        docsp-dev = devshell-tools.lib.mkContainer {
          system = "x86_64-linux";
          modules = [
            {
              services.dev-postgres = {
                enable = true;
                databases = ["docspell"];
              };
              services.dev-email.enable = true;
              services.dev-minio.enable = true;
              services.dev-solr = {
                enable = true;
                cores = ["docspell"];
              };
            }
          ];
        };
        dev-vm = devshell-tools.lib.mkVm {
          system = "x86_64-linux";
          modules = [
            {
              networking.hostName = "dev-vm";
              virtualisation.memorySize = 2048;

              services.dev-postgres = {
                enable = true;
                databases = ["docspell"];
              };
              services.dev-email.enable = true;
              services.dev-minio.enable = true;
              services.dev-solr = {
                enable = true;
                cores = ["docspell"];
                heap = 512;
              };
              port-forward.ssh = 10022;
              port-forward.dev-postgres = 6534;
              port-forward.dev-smtp = 10025;
              port-forward.dev-imap = 10143;
              port-forward.dev-webmail = 8080;
              port-forward.dev-minio-api = 9000;
              port-forward.dev-minio-console = 9001;
              port-forward.dev-solr = 8983;
            }
          ];
        };
      };
    };
}
kubernetes/helm/docspell/.gitignore (vendored): new file, 1 line
@@ -0,0 +1 @@
charts/
kubernetes/helm/docspell/.helmignore: new file, 23 lines
@@ -0,0 +1,23 @@
# Patterns to ignore when building packages.
# This supports shell glob matching, relative path matching, and
# negation (prefixed with !). Only one pattern per line.
.DS_Store
# Common VCS dirs
.git/
.gitignore
.bzr/
.bzrignore
.hg/
.hgignore
.svn/
# Common backup files
*.swp
*.bak
*.tmp
*.orig
*~
# Various IDEs
.project
.idea/
*.tmproj
.vscode/
kubernetes/helm/docspell/Chart.lock: new file, 6 lines
@@ -0,0 +1,6 @@
dependencies:
- name: postgresql
  repository: oci://registry-1.docker.io/bitnamicharts
  version: 14.0.5
digest: sha256:9a9fa6721983b212cf90cfaff02c7c001423c19d339a1a77ca59f157b1ce3ff5
generated: "2024-02-16T12:52:41.783414782+01:00"
kubernetes/helm/docspell/Chart.yaml: new file, 19 lines
@@ -0,0 +1,19 @@
apiVersion: v2
name: docspell
description: Chart for eikek's Docspell document management system
icon: https://raw.githubusercontent.com/eikek/docspell/master/artwork/logo-only.svg

version: 0.0.1
appVersion: v0.41.0

sources:
  - "https://github.com/eikek/docspell/"

maintainers:
  - name: TheAnachronism

dependencies:
  - name: postgresql
    repository: oci://registry-1.docker.io/bitnamicharts
    version: 14.0.5
    condition: postgresql.enabled
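The PostgreSQL subchart is only pulled in when postgresql.enabled is set, and the templates further below read the connection details from Bitnami-style postgresql.global.postgresql values. A minimal, hypothetical values override in that shape (user, password, database and port are placeholders; the chart's actual defaults live in its values.yaml, which is not part of this diff):

    postgresql:
      enabled: true
      global:
        postgresql:
          auth:
            username: docspell
            password: changeme   # placeholder; set existingSecret instead to keep it out of values
            database: docspell
          service:
            postgresql: 5432     # placeholder port, read by the jdbcUrl helper below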
kubernetes/helm/docspell/templates/_configs.tpl: new file, 39 lines
@@ -0,0 +1,39 @@
{{/*Postgres Access*/}}
{{- define "postgresql.jdbcUrl" -}}
{{- if (index .Values "postgresql").enabled -}}
{{- $port := .Values.postgresql.global.postgresql.service.postgresql | toString -}}
{{- $database := .Values.postgresql.global.postgresql.auth.database -}}
{{- printf "jdbc:postgresql://%s-postgresql:%s/%s" .Release.Name $port $database -}}
{{- end -}}
{{- end -}}

{{/*JDBC Connection*/}}
{{- define "docspell.secrets.JDBC" -}}
{{- if .context.Values.postgresql.enabled -}}
{{- $envPrefix := "DOCSPELL_SERVER_BACKEND_JDBC" -}}
{{- if eq .type "joex" -}}
{{- $envPrefix = "DOCSPELL_JOEX_JDBC" -}}
{{- end }}
{{ $envPrefix }}_USER: {{ .context.Values.postgresql.global.postgresql.auth.username }}
{{- if not .context.Values.postgresql.global.postgresql.auth.existingSecret }}
{{ $envPrefix }}_PASSWORD: {{ .context.Values.postgresql.global.postgresql.auth.password }}
{{- end }}
{{ $envPrefix }}_URL: {{ include "postgresql.jdbcUrl" .context }}
{{- end -}}
{{- end -}}

{{/*Full Text Search ettings*/}}
{{- define "docspell.config.fullTextSearch" -}}
{{- if .context.Values.docspell.fullTextSearch.enabled -}}
{{- $envPrefix := "DOCSPELL_SERVER_FULL__TEXT__SEARCH" -}}
{{- if eq .type "joex" -}}
{{- $envPrefix = "DOCSPELL_JOEX_FULL__TEXT__SEARCH" -}}
{{- end -}}
{{ printf "%s_ENABLED: %s" $envPrefix ( .context.Values.docspell.fullTextSearch.enabled | quote) }}
{{ printf "%s_SOLR_URL: http://%s-solr:%s/solr/docspell" $envPrefix (include "docspell.fullname" .context) ( .context.Values.solr.service.port | toString )}}
{{ printf "%s_SOLR_COMMIT__WITHIN: %s" $envPrefix ( .context.Values.docspell.fullTextSearch.solr.commitWithin | quote) }}
{{ printf "%s_SOLR_LOG__VERBOSE: %s" $envPrefix ( .context.Values.docspell.fullTextSearch.solr.logVerbose | quote ) }}
{{ printf "%s_SOLR_DEF__TYPE: %s" $envPrefix ( .context.Values.docspell.fullTextSearch.solr.defType | quote) }}
{{ printf "%s_SOLR_Q_OP: %s" $envPrefix ( .context.Values.docspell.fullTextSearch.solr.qOp | quote) }}
{{- end }}
{{- end }}
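For orientation only: the docspell.secrets.JDBC helper above emits plain KEY: value pairs for a Secret's stringData. With a release named docspell and the placeholder values sketched above (no existingSecret set), the joex variant would render roughly as follows; this is an illustration, not captured chart output:

    DOCSPELL_JOEX_JDBC_USER: docspell
    DOCSPELL_JOEX_JDBC_PASSWORD: changeme
    DOCSPELL_JOEX_JDBC_URL: jdbc:postgresql://docspell-postgresql:5432/docspell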
kubernetes/helm/docspell/templates/_helpers.tpl: new file, 60 lines
@@ -0,0 +1,60 @@
{{/*Expand the name of the chart.*/}}
{{- define "docspell.name" -}}
{{- .Chart.Name | trunc 63 | trimSuffix "-" }}
{{- end }}

{{/*
Create a default fully qualified app name.
We truncate at 63 chars because some Kubernetes name fields are limited to this (by the DNS naming spec).
If release name contains chart name it will be used as a full name.
*/}}
{{- define "docspell.fullname" -}}
{{- $name := .Chart.Name }}
{{- if contains $name .Release.Name }}
{{- .Release.Name | trunc 63 | trimSuffix "-" }}
{{- else }}
{{- printf "%s-%s" .Release.Name $name | trunc 63 | trimSuffix "-" }}
{{- end }}
{{- end }}

{{/*Create chart name and version as used by the chart label.*/}}
{{- define "docspell.chart" -}}
{{- printf "%s-%s" .Chart.Name .Chart.Version | replace "+" "_" | trunc 63 | trimSuffix "-" }}
{{- end }}

{{/*Docker Image Registry Secret Names evaluating values as templates*/}}
{{- define "docspell.images.pullSecrets" -}}
{{- $pullSecrets := .Values.global.imagePullSecrets -}}
{{- range .Values.global.imagePullSecrets -}}
{{- $pullSecrets = append $pullSecrets (dict "name" .) -}}
{{- end -}}
{{- if (not (empty $pullSecrets)) -}}
imagePullSecrets:
{{ toYaml $pullSecrets }}
{{- end -}}
{{- end -}}

{{/*Common labels*/}}
{{- define "docspell.labels" -}}
helm.sh/chart: {{ include "docspell.chart" . }}
{{ include "docspell.selectorLabels" . }}
{{- if .Chart.AppVersion }}
app.kubernetes.io/version: {{ .Chart.AppVersion | quote }}
{{- end }}
app.kubernetes.io/managed-by: {{ .Release.Service }}
{{- end }}

{{/*Selector labels*/}}
{{- define "docspell.selectorLabels" -}}
app.kubernetes.io/name: {{ include "docspell.name" . }}
app.kubernetes.io/instance: {{ .Release.Name }}
{{- end }}

{{/*Create the name of the service account to use*/}}
{{- define "docspell.serviceAccountName" -}}
{{- if .Values.serviceAccount.create }}
{{- default (include "docspell.fullname" .) .Values.serviceAccount.name }}
{{- else }}
{{- default "default" .Values.serviceAccount.name }}
{{- end -}}
{{- end -}}
kubernetes/helm/docspell/templates/ingress.yaml: new file, 43 lines
@@ -0,0 +1,43 @@
{{- if .Values.ingress.enabled -}}
{{- $fullname := include "docspell.fullname" . -}}
{{- $context := . -}}
apiVersion: {{ default "networking.k8s.io/v1" .Values.ingress.apiVersion }}
kind: Ingress
metadata:
  name: {{ $fullname }}
  labels:
    {{- include "docspell.labels" . | nindent 4 }}
  annotations:
    {{- range $key, $value := .Values.ingress.annotations }}
    {{ $key }}: {{ $value | quote }}
    {{- end }}
spec:
  {{- if .Values.ingress.className }}
  ingressClassName: {{ .Values.ingress.className }}
  {{- end }}
  {{- if .Values.ingress.tls }}
  tls:
    {{- range .Values.ingress.tls }}
    - host:
        {{- range .hosts }}
        - {{ tpl . $ | quote }}
        {{- end }}
      secretName: {{ .secretName }}
    {{- end }}
  {{- end }}
  rules:
    {{- range .Values.ingress.hosts }}
    - host: {{ tpl .host $ | quote }}
      http:
        paths:
          {{- range .paths }}
          - path: {{ .path }}
            pathType: {{ .pathType }}
            backend:
              service:
                name: {{ include "docspell.fullname" $context }}-restserver
                port:
                  name: http
          {{- end }}
    {{- end }}
{{- end }}
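The ingress template iterates .Values.ingress.hosts and .Values.ingress.tls and always routes every path to the <fullname>-restserver service. A hypothetical values fragment in the shape the template expects (host name, class name and secret name are placeholders):

    ingress:
      enabled: true
      className: nginx            # placeholder ingress class
      annotations: {}
      hosts:
        - host: docspell.example.com
          paths:
            - path: /
              pathType: Prefix
      tls:
        - hosts:
            - docspell.example.com
          secretName: docspell-tls   # placeholder TLS secret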
kubernetes/helm/docspell/templates/joex/_configs.tpl: new file, 125 lines
@@ -0,0 +1,125 @@
{{/*App ID*/}}
{{- define "docspell.joex.config.appId" -}}
{{- $appId := .Values.docspell.joex.appId | default (printf "%s-joex" (include "docspell.fullname" .)) -}}
{{- print $appId -}}
{{- end -}}

{{/*
Base URL
*/}}
{{- define "docspell.joex.config.baseUrl" -}}
{{- $service := printf "%s-joex" (include "docspell.fullname" .) -}}
{{- $port := .Values.joex.service.port | toString -}}
{{- printf "http://%s:%s" $service $port -}}
{{- end -}}

{{/*Bind Config*/}}
{{- define "docspell.joex.config.bind" -}}
{{- if not (eq .Values.joex.service.port .Values.docspell.joex.bind.port) -}}
{{- fail "Joex and it's service don't have to use the same port, no connection will be possible." -}}
{{- end -}}
{{- $envPrefix := "DOCSPELL_JOEX_BIND" -}}
{{ $envPrefix }}_ADDRESS: {{ .Values.docspell.joex.bind.address | quote }}
{{ $envPrefix }}_PORT: {{ .Values.docspell.joex.bind.port | quote }}
{{- end -}}

{{/*Logging Config*/}}
{{- define "docspell.joex.config.logging" -}}
{{- $envPrefix := "DOCSPELL_JOEX_LOGGING" -}}
{{ $envPrefix }}_FORMAT: {{ .Values.docspell.joex.logging.format }}
{{ $envPrefix }}_MINIMUM__LEVEL: {{ .Values.docspell.joex.logging.minimumLevel }}
{{- end -}}

{{/*JDBC Connection*/}}
{{- define "docspell.joex.config.JDBC" -}}
{{- $envPrefix := "DOCSPELL_JOEX_JDBC" -}}
{{ $envPrefix }}_USER: {{ .Values.postgresql.global.postgresql.auth.username }}
{{ $envPrefix }}_PASSWORD: {{ .Values.postgresql.global.postgresql.auth.password }}
{{ $envPrefix }}_URL: {{ include "postgresql.jdbcUrl" . }}
{{- end -}}

{{/*Database Schema Settings*/}}
{{- define "docspell.joex.config.databaseSchema" -}}
{{- $envPrefix := "DOCSPELL_JOEX_DATABASE__SCHEMA" -}}
{{ $envPrefix }}_RUN__MAIN__MIGRATIONS: {{ .Values.docspell.joex.databaseSchema.runMainMigrations | quote }}
{{ $envPrefix }}_RUN__FIXUP__MIGRATIONS: {{ .Values.docspell.joex.databaseSchema.runFixupMigrations | quote }}
{{ $envPrefix }}_REPAIR__SCHEMA: {{ .Values.docspell.joex.databaseSchema.repairSchema | quote }}
{{- end -}}

{{/*Scheduler Settings*/}}
{{- define "docspell.joex.config.scheduler" -}}
{{- $envPrefix := "DOCSPELL_JOEX_SCHEDULER" -}}
{{ $envPrefix }}_NAME: {{ default (include "docspell.joex.config.appId" .) .Values.docspell.joex.scheduler.name }}
{{ $envPrefix }}_POOL__SIZE: {{ .Values.docspell.joex.scheduler.poolSize | quote }}
{{ $envPrefix }}_COUNTING__SCHEME: {{ .Values.docspell.joex.scheduler.countingScheme | quote }}
{{ $envPrefix }}_RETRIES: {{ .Values.docspell.joex.scheduler.retries | quote }}
{{ $envPrefix }}_RETRY__DELAY: {{ .Values.docspell.joex.scheduler.retryDelay | quote }}
{{ $envPrefix }}_LOG__BUFFER__SIZE: {{ .Values.docspell.joex.scheduler.logBufferSize | quote }}
{{ $envPrefix }}_WAKEUP__PERIOD: {{ .Values.docspell.joex.scheduler.wakeupPeriod | quote }}
{{- end -}}

{{/*PeriodScheduler Settings*/}}
{{- define "docspell.joex.config.periodicScheduler" -}}
{{- $envPrefix := "DOCSPELL_JOEX_PERIODIC__SCHEDULER" -}}
{{ $envPrefix }}_NAME: {{ default (include "docspell.joex.config.appId" .) .Values.docspell.joex.periodicScheduler.name }}
{{ $envPrefix }}_WAKEUP__PERIOD: {{ .Values.docspell.joex.periodicScheduler.wakeupPeriod | quote }}
{{- end -}}

{{/*User Tasks Settings*/}}
{{- define "docspell.joex.config.userTasks" -}}
{{- $envPrefix := "DOCSPELL_JOEX_USER__TASKS_SCAN__MAILBOX" -}}
{{ $envPrefix }}_MAX__FOLDERS: {{ .Values.docspell.joex.userTasks.scanMailbox.maxFolders | quote }}
{{ $envPrefix }}_MAIL__CHUNK__SIZE: {{ .Values.docspell.joex.userTasks.scanMailbox.mailChunkSize | quote }}
{{ $envPrefix }}_MAX__MAILS: {{ .Values.docspell.joex.userTasks.scanMailbox.maxMails | quote }}
{{- end -}}

{{/*House Keeping Settings*/}}
{{- define "docspell.joex.config.houseKeeping" -}}
{{- $envPrefix := "DOCSPELL_JOEX_HOUSE__KEEPING" -}}
{{ $envPrefix }}_SCHEDULE: {{ .Values.docspell.joex.houseKeeping.schedule | quote }}
{{ $envPrefix }}_CLEANUP__INVITES_ENABLED: {{ .Values.docspell.joex.houseKeeping.cleanupInvites.enabled | quote }}
{{ $envPrefix }}_CLEANUP__INVITES_OLDER__THAN: {{ .Values.docspell.joex.houseKeeping.cleanupInvites.olderThan | quote }}
{{ $envPrefix }}_CLEANUP__REMEMBER__ME_ENABLED: {{ .Values.docspell.joex.houseKeeping.cleanupRememberMe.enabled | quote }}
{{ $envPrefix }}_CLEANUP__REMEMBER__ME_OLDER__THAN: {{ .Values.docspell.joex.houseKeeping.cleanupRememberMe.olderThan | quote }}
{{ $envPrefix }}_CLEANUP__JOBS_ENABLED: {{ .Values.docspell.joex.houseKeeping.cleanupJobs.enabled | quote }}
{{ $envPrefix }}_CLEANUP__JOBS_OLDER__THAN: {{ .Values.docspell.joex.houseKeeping.cleanupJobs.olderThan | quote }}
{{ $envPrefix }}_CLEANUP__JOBS_DELETE__BATCH: {{ .Values.docspell.joex.houseKeeping.cleanupJobs.deleteBatch | quote }}
{{ $envPrefix }}_CLEANUP__DOWNLOADS_ENABLED: {{ .Values.docspell.joex.houseKeeping.cleanupDownloads.enabled | quote }}
{{ $envPrefix }}_CLEANUP__DOWNLOADS_OLDER__THAN: {{ .Values.docspell.joex.houseKeeping.cleanupDownloads.olderThan | quote }}
{{ $envPrefix }}_CLEANUP__NODES_ENABLED: {{ .Values.docspell.joex.houseKeeping.cleanupNodes.enabled | quote }}
{{ $envPrefix }}_CLEANUP__NODES_MIN__NOT__FOUND: {{ .Values.docspell.joex.houseKeeping.cleanupNodes.minNotFound |quote }}
{{ $envPrefix }}_INTEGRITY__CHECK_ENABLED: {{ .Values.docspell.joex.houseKeeping.integrityCheck.enabled | quote }}
{{- end -}}

{{/*Update Check Settings*/}}
{{- define "docspell.joex.config.updateCheck" -}}
{{- if and .Values.docspell.joex.updateCheck.enabled (not .Values.docspell.joex.updateCheck.recipients) -}}
{{- fail "Update check recipients have to be set when enabling update check" -}}
{{- end -}}
{{- $envPrefix := "DOCSPELL_JOEX_UPDATE__CHECK" -}}
{{ $envPrefix }}_ENABLED: {{ .Values.docspell.joex.updateCheck.enabled | quote }}
{{ $envPrefix }}_TEST__RUN: {{ .Values.docspell.joex.updateCheck.testRun | quote }}
{{ $envPrefix }}_SCHEDULE: {{ .Values.docspell.joex.updateCheck.schedule | quote }}
{{- if .Values.docspell.joex.updateCheck.senderAccount }}
{{ $envPrefix }}_SENDER__ACOUNT: {{ .Values.docspell.joex.updateCheck.senderAccount }}
{{ $envPrefix }}_SMTP__ID: {{ .Values.docspell.joex.updateCheck.smtpId }}
{{- end }}
{{- range $index, $recipient := .Values.docspell.joex.updateCheck.recipients }}
{{ $envPrefix }}_RECIPIENTS_{{ $index }}: {{ $recipient }}
{{- end }}
{{ $envPrefix }}_SUBJECT: {{ .Values.docspell.joex.updateCheck.subject }}
{{ $envPrefix }}_BODY: | {{ .Values.docspell.joex.updateCheck.body | nindent 4 }}
{{- end -}}

{{/*Convert Settings*/}}
{{- define "docspell.joex.config.convert" -}}
{{- $envPrefix := "DOCSPELL_JOEX_CONVERT" -}}
{{ $envPrefix }}_HTML__CONVERTER: {{ .Values.docspell.joex.convert.htmlConverter }}
{{- end -}}

{{/*Full Text Search Settings*/}}
{{- define "docspell.joex.config.fullTextSearch" -}}
{{- if .Values.docspell.fullTextSearch.enabled -}}
DOCSPELL_JOEX_FULL__TEXT__SEARCH_MIGRATION_INDEX__ALL__CHUNK: {{ .Values.docspell.joex.fullTextSearch.migration.indexAllChink | quote }}
{{- end -}}
{{- end -}}
@@ -0,0 +1,10 @@
{{- define "docspell.joex.secrets.existingSecrets" -}}
{{/*PostgreSQL Password*/}}
{{- if .Values.postgresql.global.postgresql.auth.existingSecret -}}
- name: DOCSPELL_JOEX_JDBC_PASSWORD
  valueFrom:
    secretKeyRef:
      name: {{ .Values.postgresql.global.postgresql.auth.existingSecret }}
      key: {{ .Values.postgresql.global.postgresql.auth.secretKeys.userPasswordKey | default "password" }}
{{- end -}}
{{- end -}}
kubernetes/helm/docspell/templates/joex/_helpers.tpl: new file, 29 lines
@@ -0,0 +1,29 @@
{{/*Common labels*/}}
{{- define "joex.labels" -}}
helm.sh/chart: {{ include "docspell.chart" . }}
app: {{ include "docspell.name" . }}-joex
{{ include "joex.selectorLabels" . }}
app.kubernetes.io/version: {{ .Values.joex.image.tag | default .Chart.AppVersion | quote }}
version: {{ .Values.joex.image.tag | default .Chart.AppVersion | quote }}
app.kubernetes.io/managed-by: {{ .Release.Service }}
{{- end }}

{{/*Selector labels*/}}
{{- define "joex.selectorLabels" -}}
app.kubernetes.io/name: {{ include "docspell.name" . }}-joex
app.kubernetes.io/instance: {{ .Release.Name }}
{{- end }}

{{/*Create joex image name and tag used by the deployment*/}}
{{- define "joex.image" -}}
{{- $registry := .Values.global.imageRegistry | default .Values.joex.image.registry -}}
{{- $repository := .Values.joex.image.repository -}}
{{- $separator := ":" -}}
{{- $tag := .Values.joex.image.tag | default .Chart.AppVersion -}}
{{- if $registry -}}
{{- printf "%s/%s%s%s" $registry $repository $separator $tag -}}
{{- else -}}
{{- printf "%s%s%s" $repository $separator $tag -}}
{{- end -}}
{{- end -}}
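The joex.image helper above just joins registry, repository and tag, falls back to the chart's appVersion when no tag is given, and lets global.imageRegistry override the per-image registry. A hypothetical illustration (the repository and registry values are placeholders, not necessarily the chart's defaults):

    joex:
      image:
        registry: ""            # empty: no registry prefix; a set global.imageRegistry wins over this
        repository: docspell/joex   # placeholder repository
        tag: ""                 # empty: falls back to .Chart.AppVersion, i.e. v0.41.0 here
    # rendered image reference under these assumptions: docspell/joex:v0.41.0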
kubernetes/helm/docspell/templates/joex/config.yaml: new file, 23 lines
@@ -0,0 +1,23 @@
kind: ConfigMap
apiVersion: v1
metadata:
  name: {{ include "docspell.fullname" . }}-joex
  labels:
    {{- include "joex.labels" . | nindent 4 }}
data:
  DOCSPELL_JOEX_APP__ID: {{ include "docspell.joex.config.appId" . }}
  DOCSPELL_JOEX_BASE__URL: {{ include "docspell.joex.config.baseUrl" . }}
  {{- include "docspell.joex.config.bind" . | nindent 4 }}
  {{- include "docspell.joex.config.logging" . | nindent 4 }}
  DOCSPELL_JOEX_MAIL__DEBUG: {{ .Values.docspell.joex.mailDebug | quote }}
  {{- include "docspell.joex.config.databaseSchema" . | nindent 4 }}
  {{- include "docspell.joex.config.scheduler" . | nindent 4 }}
  {{- include "docspell.joex.config.periodicScheduler" . | nindent 4 }}
  {{- include "docspell.joex.config.userTasks" . | nindent 4 }}
  {{- include "docspell.joex.config.houseKeeping" . | nindent 4 }}
  {{- include "docspell.joex.config.updateCheck" . | nindent 4 }}
  {{- include "docspell.joex.config.convert" . | nindent 4 }}
  {{- if .Values.docspell.fullTextSearch.enabled -}}
  {{- include "docspell.config.fullTextSearch" (dict "context" . "type" "joex") | nindent 4 }}
  {{- include "docspell.joex.config.fullTextSearch" . | nindent 4}}
  {{- end }}
kubernetes/helm/docspell/templates/joex/deployment.yaml: new file, 63 lines
@@ -0,0 +1,63 @@
apiVersion: apps/v1
kind: Deployment
metadata:
  name: {{ include "docspell.fullname" . }}-joex
  annotations:
    {{- if .Values.joex.deployment.annotations }}
    {{- toYaml .Values.joex.deployment.annotations | nindent 4 }}
    {{- end }}
  labels:
    {{- include "joex.labels" . | nindent 4 }}
spec:
  replicas: {{ .Values.joex.replicaCount }}
  strategy:
    type: {{ .Values.joex.strategy.type }}
    {{- if eq .Values.joex.strategy.type "RollingUpdate" }}
    rollingUpdate:
      maxUnavailable: {{ .Values.joex.strategy.rollingUpdate.maxUnavailable }}
      maxSurge: {{ .Values.joex.strategy.rollingUpdate.maxSurge }}
    {{- end }}
  selector:
    matchLabels:
      {{- include "joex.selectorLabels" . | nindent 6 }}
      {{- if .Values.joex.deployment.labels }}
      {{- toYaml .Values.joex.deployment.labels | nindent 6 }}
      {{- end }}
  template:
    metadata:
      annotations:
        checksum/config: {{ include (print $.Template.BasePath "/joex/config.yaml") . | sha256sum }}
        checksum/secret: {{ include (print $.Template.BasePath "/joex/secret.yaml") . | sha256sum }}
        {{- with .Values.joex.podAnnotations }}
        {{- toYaml . | nindent 8 }}
        {{- end }}
      labels:
        {{- include "joex.labels" . | nindent 8 }}
        {{- if .Values.joex.deployment.labels }}
        {{- toYaml .Values.joex.deployment.labels | nindent 8 }}
        {{- end }}
    spec:
      {{- if or .Values.serviceAccount.create .Values.serviceAccount.name }}
      serviceAccountName: {{ include "docspell.serviceAccountName" . }}
      {{- end }}
      terminationGracePeriodSeconds: {{ .Values.joex.deployment.terminationGracePeriodSeconds }}
      containers:
        - name: joex
          image: "{{ include "joex.image" . }}"
          imagePullPolicy: {{ .Values.joex.image.pullPolicy }}
          {{- with .Values.joex.additionalArgs }}
          args:
            {{- toYaml . | nindent 10 }}
          {{- end }}
          ports:
            - containerPort: {{ .Values.joex.service.port }}
              name: http
          env:
            {{- include "docspell.joex.secrets.existingSecrets" . | nindent 10 }}
          envFrom:
            - configMapRef:
                name: {{ include "docspell.fullname" . }}-joex
            - secretRef:
                name: {{ include "docspell.fullname" . }}-joex-secret
          resources:
            {{- toYaml .Values.joex.resources | nindent 12 }}
kubernetes/helm/docspell/templates/joex/secret.yaml: new file, 9 lines
@@ -0,0 +1,9 @@
apiVersion: v1
kind: Secret
metadata:
  name: {{ include "docspell.fullname" . }}-joex-secret
  labels:
    {{- include "joex.labels" . | nindent 4}}
type: Opaque
stringData:
  {{- include "docspell.secrets.JDBC" (dict "context" . "type" "joex") | nindent 4 -}}
kubernetes/helm/docspell/templates/joex/service.yaml: new file, 20 lines
@@ -0,0 +1,20 @@
apiVersion: v1
kind: Service
metadata:
  name: {{ include "docspell.fullname" . }}-joex
  labels:
    {{- include "joex.labels" . | nindent 4 }}
    {{- if .Values.joex.service.labels }}
    {{- toYaml .Values.joex.serivce.labels | nindent 4 }}
    {{- end }}
  annotations:
    {{- toYaml .Values.joex.service.annotations | nindent 4 }}
spec:
  type: {{ .Values.joex.service.type }}
  ports:
    - port: {{ .Values.joex.service.port }}
      targetPort: {{ .Values.joex.service.targetPort | default .Values.joex.service.port }}
      protocol: TCP
      name: http
  selector:
    {{- include "joex.selectorLabels" . | nindent 6 -}}
174
kubernetes/helm/docspell/templates/restserver/_configs.tpl
Normal file
174
kubernetes/helm/docspell/templates/restserver/_configs.tpl
Normal file
@@ -0,0 +1,174 @@
|
|||||||
|
{{/*App ID*/}}
|
||||||
|
{{- define "docspell.server.config.appId" -}}
|
||||||
|
{{- $appId := .Values.docspell.server.appId | default (printf "%s-restserver" (include "docspell.fullname" .)) -}}
|
||||||
|
{{- print $appId -}}
|
||||||
|
{{- end -}}
|
||||||
|
|
||||||
|
{{/*Logging Config*/}}
|
||||||
|
{{- define "docspell.server.config.logging" -}}
|
||||||
|
{{- $envPrefix := "DOCSPELL_SERVER_LOGGING" -}}
|
||||||
|
{{ $envPrefix }}_FORMAT: {{ .Values.docspell.server.logging.format }}
|
||||||
|
{{ $envPrefix }}_MINIMUM__LEVEL: {{ .Values.docspell.server.logging.minimumLevel }}
|
||||||
|
{{- end -}}
|
||||||
|
|
||||||
|
{{/*Bind Config*/}}
|
||||||
|
{{- define "docspell.server.config.bind" -}}
|
||||||
|
{{- if not (eq .Values.restserver.service.port .Values.docspell.server.bind.port) -}}
|
||||||
|
{{- fail "The restserver and it's service don't have to use the same port, no connection will be possible." -}}
|
||||||
|
{{- end -}}
|
||||||
|
{{- $envPrefix := "DOCSPELL_SERVER_BIND" -}}
|
||||||
|
{{ $envPrefix }}_ADDRESS: {{ .Values.docspell.server.bind.address | quote }}
|
||||||
|
{{ $envPrefix }}_PORT: {{ .Values.docspell.server.bind.port | quote }}
|
||||||
|
{{- end -}}
|
||||||
|
|
||||||
|
{{/*Auth Config*/}}
|
||||||
|
{{- define "docspell.server.config.auth" -}}
|
||||||
|
{{- $envPrefix := "DOCSPELL_SERVER_AUTH" -}}
|
||||||
|
{{ $envPrefix }}_SESSION__VALID: {{ .Values.docspell.server.auth.sessionValid | quote }}
|
||||||
|
{{ $envPrefix }}_REMEMBER__ME_ENABLED: {{ .Values.docspell.server.auth.rememberMe.enabled | quote }}
|
||||||
|
{{ $envPrefix }}_REMEMBER__ME_VALID: {{ .Values.docspell.server.auth.rememberMe.valid | quote }}
|
||||||
|
{{ $envPrefix }}_ON__ACCOUNT__SOURCE__CONFLICT: {{ .Values.docspell.server.auth.onAccountSourceConflict }}
|
||||||
|
{{- end -}}
|
||||||
|
|
||||||
|
{{/*Auth Secrets*/}}
|
||||||
|
{{- define "docspell.server.secrets.auth" -}}
|
||||||
|
{{- if .Values.docspell.server.auth.serverSecret -}}
|
||||||
|
{{- if and .Values.docspell.server.auth.serverSecret.value .Values.docspell.server.auth.serverSecret.existingSecret -}}
|
||||||
|
{{- fail "Only either a fixed server secret or an existing secret should be specified" -}}
|
||||||
|
{{- end -}}
|
||||||
|
{{- with .Values.docspell.server.auth.serverSecret.value }}
|
||||||
|
DOCSPELL_SERVER_AUTH_SERVER__SECRET: {{ . }}
|
||||||
|
{{- end -}}
|
||||||
|
{{- end -}}
|
||||||
|
{{- end -}}
|
||||||
|
|
||||||
|
{{/*Download Config*/}}
|
||||||
|
{{- define "docspell.server.config.donwload" -}}
|
||||||
|
{{- $envPrefix := "DOCSPELL_SERVER_DOWNLOAD__ALL" -}}
|
||||||
|
{{ $envPrefix }}_MAX__FILES: {{ .Values.docspell.server.donwloadAll.maxFiles | quote }}
|
||||||
|
{{ $envPrefix }}_MAX__SIZE: {{ .Values.docspell.server.downloadAll.maxSize }}
|
||||||
|
{{- end -}}
|
||||||
|
|
||||||
|
{{/*OpenID Config*/}}
|
||||||
|
{{- define "docspell.server.config.openid" -}}
|
||||||
|
{{- $envPrefix := "DOCSPELL_SERVER_OPENID" -}}
|
||||||
|
{{- range $index, $entry := .Values.docspell.server.openid -}}
|
||||||
|
{{- if $entry.enabled -}}
|
||||||
|
{{ $envPrefix }}_{{ $index }}_DISPLAY: {{ $entry.display }}
|
||||||
|
{{ $envPrefix }}_{{ $index }}_ENABLED: {{ $entry.enabled | quote }}
|
||||||
|
{{ $envPrefix }}_{{ $index }}_COLLECTIVE__KEY: {{ $entry.collectiveKey }}
|
||||||
|
{{ $envPrefix }}_{{ $index }}_USER__KEY: {{ $entry.userKey }}
|
||||||
|
{{- $envPrefix = printf "%s_%s_PROVIDER" $envPrefix ($index | toString) }}
|
||||||
|
{{ $envPrefix }}_PROVIDER__ID: {{ $entry.provider.providerId }}
|
||||||
|
{{ $envPrefix }}_SCOPE: {{ $entry.provider.scope }}
|
||||||
|
{{ $envPrefix }}_AUTHORIZE__URL: {{ $entry.provider.authorizeUrl }}
|
||||||
|
{{ $envPrefix }}_TOKEN__URL: {{ $entry.provider.tokenUrl }}
|
||||||
|
{{- with $entry.provider.userUrl }}
|
||||||
|
{{ $envPrefix }}_USER__URL: {{ . }}
|
||||||
|
{{- end }}
|
||||||
|
{{ $envPrefix }}_LOGOUT__URL: {{ $entry.provider.logoutUrl }}
|
||||||
|
{{ $envPrefix }}_SIG__ALGO: {{ $entry.provider.sigAlgo }}
|
||||||
|
{{- end -}}
|
||||||
|
{{- end -}}
|
||||||
|
{{- end -}}
|
||||||
|
|
||||||
|
{{/*OpenID Secrets*/}}
|
||||||
|
{{- define "docspell.server.secrets.openid" -}}
|
||||||
|
{{- $envPrefix := "DOCSPELL_SERVER_OPENID" -}}
|
||||||
|
{{- range $index, $entry := .Values.docspell.server.openid -}}
|
||||||
|
{{- if and $entry.enabled (not $entry.provider.existingSecret) -}}
|
||||||
|
{{- $envPrefix = printf "%s_%s_PROVIDER" $envPrefix ($index | toString) }}
|
||||||
|
{{ $envPrefix }}_CLIENT__ID: {{ $entry.provider.clientId }}
|
||||||
|
{{ $envPrefix }}_CLIENT__SECRET: {{ $entry.provider.clientSecret }}
|
||||||
|
{{ $envPrefix }}_SIGN__KEY: {{ $entry.provider.signKey }}
|
||||||
|
{{- end -}}
|
||||||
|
{{- end -}}
|
||||||
|
{{- end -}}
|
||||||
|
|
||||||
|
{{/*Database Schema Settings*/}}
|
||||||
|
{{- define "docspell.server.config.databaseSchema" -}}
|
||||||
|
{{- $envPrefix := "DOCSPELL_SERVER_BACKEND_DATABASE__SCHEMA" -}}
|
||||||
|
{{ $envPrefix }}_RUN__MAIN__MIGRATIONS: {{ .Values.docspell.server.backend.databaseSchema.runMainMigrations | quote }}
|
||||||
|
{{ $envPrefix }}_RUN__FIXUP__MIGRATIONS: {{ .Values.docspell.server.backend.databaseSchema.runFixupMigrations | quote }}
|
||||||
|
{{ $envPrefix }}_REPAIR__SCHEMA: {{ .Values.docspell.server.backend.databaseSchema.repairSchema | quote }}
|
||||||
|
{{- end -}}
|
||||||
|
|
||||||
|
{{/*Integration Endpoint Settings*/}}
|
||||||
|
{{- define "docspell.server.config.integrationEndpoint" -}}
|
||||||
|
{{- $envPrefix := "DOCSPELL_SERVER_INTEGRATION__ENDPOINT" -}}
|
||||||
|
{{ $envPrefix }}_ENABLED: {{ .Values.docspell.server.integrationEndpoint.enabled | quote }}
|
||||||
|
{{ $envPrefix }}_PRIORITY: {{ .Values.docspell.server.integrationEndpoint.priority }}
|
||||||
|
{{ $envPrefix }}_SOURCE__NAME: {{ .Values.docspell.server.integrationEndpoint.sourceName }}
|
||||||
|
{{- if .Values.docspell.server.integrationEndpoint.allowedIps.enabled }}
|
||||||
|
{{ $envPrefix }}_ALLOWED__IPS_ENABLED: {{ .Values.docspell.server.integrationEndpoint.allowedIps.enabled | quote }}
|
||||||
|
{{- range $index, $ip := .Values.docspell.server.integrationEndpoint.allowedIps.ips }}
|
||||||
|
{{ $envPrefix }}_ALLOWED__IPS_IPS_{{ $index }}: {{ $ip }}
|
||||||
|
{{- end }}
|
||||||
|
{{- end }}
|
||||||
|
{{- if .Values.docspell.server.integrationEndpoint.httpBasic.enabled }}
|
||||||
|
{{ $envPrefix }}_HTTP__BASIC_ENABLED: {{ .Values.docspell.server.integrationEndpoint.httpBasic.enabled | quote }}
|
||||||
|
{{- end }}
|
||||||
|
{{- if .Values.docspell.server.integrationEndpoint.httpHeader.enabled }}
|
||||||
|
{{ $envPrefix }}_HTTP__HEADER_ENABLED: {{ .Values.docspell.server.integrationEndpoint.httpHeader.enabled | quote }}
|
||||||
|
{{- end }}
|
||||||
|
{{- end }}
|
||||||
|
|
||||||
|
{{/*Integration Endpoint Secrets*/}}
|
||||||
|
{{- define "docspell.server.secrets.integrationEndpoint" -}}
|
||||||
|
{{- if .Values.docspell.server.integrationEndpoint.httpBasic.enabled -}}
|
||||||
|
{{- if and .Values.docspell.server.integrationEndpoint.httpBasic.credentials .Values.docspell.server.integrationEndpoint.httpBasic.existingSecret -}}
|
||||||
|
{{- fail "Only either the fixed credentials or an existing secret for the httpBasic integration endpoint should be set" -}}
|
||||||
|
{{- end -}}
|
||||||
|
{{- $envPrefix := "DOCSPELL_SERVER_INTEGRATION__ENDPOINT_HTTP__BASIC" -}}
|
||||||
|
{{ $envPrefix}}_REALM: {{ .Values.docspell.server.integrationEndpoint.httpBasic.realm }}
|
||||||
|
{{- with .Values.docspell.server.integrationEndpoint.httpBasic.credentials }}
|
||||||
|
{{ $envPrefix}}_USER: {{ .username }}
|
||||||
|
{{ $envPrefix}}_PASSWORD: {{ .password }}
|
||||||
|
{{- end -}}
|
||||||
|
{{- end }}
|
||||||
|
{{- if .Values.docspell.server.integrationEndpoint.httpHeader.enabled -}}
|
||||||
|
{{- if and .Values.docspell.server.integrationEndpoint.httpHeader.headerValue.value .Values.docspell.server.integrationEndpoint.httpHeader.headerValue.existingSecret -}}
|
||||||
|
{{- fail "Only either the fixed header value or an existing secret for the http header ingration endpoint should be set" -}}
|
||||||
|
{{- end -}}
|
||||||
|
{{ $envPrefix := "DOCSPELL_SERVER_INTEGRATION__ENDPOINT_HTTP__HEADER" }}
|
||||||
|
{{ $envPrefix }}_HEADER__NAME: {{ .Values.docspell.server.integrationEndpoint.httpHeader.headerName }}
|
||||||
|
{{- with .Values.docspell.server.integrationEndpoint.httpHeader.headerValue.value -}}
|
||||||
|
{{ $envPrefix }}_HEADER__VALUE: {{ . }}
|
||||||
|
{{- end -}}
|
||||||
|
{{- end }}
|
||||||
|
{{- end -}}
|
||||||
|
|
||||||
|
{{/*Admin Endpoint Secrets*/}}
|
||||||
|
{{- define "docspell.server.secrets.adminEndpoint" -}}
|
||||||
|
{{- if .Values.docspell.server.adminEndpoint.enabled -}}
|
||||||
|
{{- $context := . -}}
|
||||||
|
{{- with .Values.docspell.server.adminEndpoint.secret -}}
|
||||||
|
{{- if $context.Values.docspell.server.adminEndpoint.existingSecret }}
|
||||||
|
{{- fail "Only either the fixed value or an existing secret for the admin endpoint should be set" -}}
|
||||||
|
{{- end -}}
|
||||||
|
DOCSPELL_SERVER_ADMIN__ENDPOINT_SECRET: {{ .value }}
|
||||||
|
{{- end -}}
|
||||||
|
{{- end -}}
|
||||||
|
{{- end -}}
|
||||||
|
|
||||||
|
{{/*Signup Settings*/}}
|
||||||
|
{{- define "docspell.server.config.signup" -}}
|
||||||
|
{{- $envPrefix := "DOCSPELL_SERVER_BACKEND_SIGNUP" -}}
|
||||||
|
{{ $envPrefix }}_MODE: {{ .Values.docspell.server.backend.signup.mode }}
|
||||||
|
{{- if eq .Values.docspell.server.backend.signup.mode "invite" }}
|
||||||
|
{{ $envPrefix }}_INVITE__TIME: {{ .Values.docspell.server.backend.signup.inviteTime }}
|
||||||
|
{{- end -}}
|
||||||
|
{{- end -}}
|
||||||
|
|
||||||
|
{{/*Signup Secrets*/}}
|
||||||
|
{{- define "docspell.server.secrets.signup" -}}
|
||||||
|
{{- if eq .Values.docspell.server.backend.signup.mode "invite" }}
|
||||||
|
{{- $context := . -}}
|
||||||
|
{{- with .Values.docspell.server.backend.signup.newInvitePassword.value -}}
|
||||||
|
{{- if $context.Values.docspell.server.backend.signup.newInvitePassword.existingSecret -}}
|
||||||
|
{{- fail "Only either the fixed value or an existing secret for the new invite password should be set" -}}
|
||||||
|
{{- end -}}
|
||||||
|
DOCSPELL_SERVER_BACKEND_SIGNUP_NEW__INVITE__PASSWORD: {{ . }}
|
||||||
|
{{- end -}}
|
||||||
|
{{- end -}}
|
||||||
|
{{- end -}}
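To illustrate the guard in `docspell.server.secrets.auth` above: a fixed secret value and a reference to an existing secret are mutually exclusive. A minimal values sketch (the secret name, key and value are placeholders):

docspell:
  server:
    auth:
      serverSecret:
        value: "b64:ZXhhbXBsZS1vbmx5"   # either a fixed secret ...
        # existingSecret:               # ... or a reference to one, never both
        #   name: docspell-auth
        #   key: server-secret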
|
@@ -0,0 +1,86 @@
|
|||||||
|
{{- define "docspell.server.secrets.existingSecrets" -}}
|
||||||
|
{{/*Server Secret*/}}
|
||||||
|
{{- if .Values.docspell.server.auth.serverSecret -}}
|
||||||
|
{{- if and .Values.docspell.server.auth.serverSecret.existingSecret (not .Values.docspell.server.auth.serverSecret.value) -}}
|
||||||
|
- name: DOCSPELL_SERVER_AUTH_SERVER__SECRET
|
||||||
|
valueFrom:
|
||||||
|
secretKeyRef:
|
||||||
|
name: {{ .Values.docspell.server.auth.serverSecret.existingSecret.name }}
|
||||||
|
key: {{ .Values.docspell.server.auth.serverSecret.existingSecret.key }}
|
||||||
|
{{- end -}}
|
||||||
|
{{- end }}
|
||||||
|
{{/*OIDC Secrets*/}}
|
||||||
|
{{- range $index, $entry := .Values.docspell.server.openid -}}
|
||||||
|
{{- if and $entry.enabled $entry.provider.existingSecret -}}
|
||||||
|
{{- $envPrefix := printf "%s_%s_PROVIDER" "DOCSPELL_SERVER_OPENID" ($index | toString) -}}
|
||||||
|
- name: {{ $envPrefix }}_CLIENT__ID
|
||||||
|
valueFrom:
|
||||||
|
secretKeyRef:
|
||||||
|
name: {{ $entry.provider.existingSecret.name }}
|
||||||
|
key: {{ $entry.provider.existingSecret.clientIdKey }}
|
||||||
|
- name: {{ $envPrefix }}_CLIENT__SECRET
|
||||||
|
valueFrom:
|
||||||
|
secretKeyRef:
|
||||||
|
name: {{ $entry.provider.existingSecret.name }}
|
||||||
|
key: {{ $entry.provider.existingSecret.clientSecretKey }}
|
||||||
|
- name: {{ $envPrefix }}_SIGN__KEY
|
||||||
|
{{- if $entry.provider.existingSecret.signKeyKey -}}
|
||||||
|
valueFrom:
|
||||||
|
secretKeyRef:
|
||||||
|
name: {{ $entry.provider.existingSecret.name }}
|
||||||
|
key: {{ $entry.provider.existingSecret.signKeyKey }}
|
||||||
|
{{- else }}
|
||||||
|
value: ""
|
||||||
|
{{- end -}}
|
||||||
|
{{- end -}}
|
||||||
|
{{- end -}}
|
||||||
|
{{/*Integration Endpoint Http Basic Auth*/}}
|
||||||
|
{{- if .Values.docspell.server.integrationEndpoint.httpBasic.existingSecret }}
|
||||||
|
- name: DOCSPELL_SERVER_INTEGRATION__ENDPOINT_HTTP__BASIC_USER
|
||||||
|
valueFrom:
|
||||||
|
secretKeyRef:
|
||||||
|
name: {{ .Values.docspell.server.integrationEndpoint.httpBasic.existingSecret.name }}
|
||||||
|
key: {{ .Values.docspell.server.integrationEndpoint.httpBasic.existingSecret.usernameKey }}
|
||||||
|
- name: DOCSPELL_SERVER_INTEGRATION__ENDPOINT_HTTP__BASIC_PASSWORD
|
||||||
|
valueFrom:
|
||||||
|
secretKeyRef:
|
||||||
|
name: {{ .Values.docspell.server.integrationEndpoint.httpBasic.existingSecret.name }}
|
||||||
|
key: {{ .Values.docspell.server.integrationEndpoint.httpBasic.existingSecret.passwordKey }}
|
||||||
|
{{- end }}
|
||||||
|
{{/*Integration Endpoint Http Header Auth*/}}
|
||||||
|
{{- if and .Values.docspell.server.integrationEndpoint.enabled .Values.docspell.server.integrationEndpoint.httpHeader.enabled -}}
|
||||||
|
{{- if .Values.docspell.server.integrationEndpoint.httpHeader.headerValue.existingSecret }}
|
||||||
|
- name: DOCSPELL_SERVER_INTEGRATION__ENDPOINT_HTTP__HEADER_HEADER__VALUE
|
||||||
|
valueFrom:
|
||||||
|
secretKeyRef:
|
||||||
|
name: {{ .Values.docspell.server.integrationEndpoint.httpHeader.headerValue.existingSecret.name }}
|
||||||
|
key: {{ .Values.docspell.server.integrationEndpoint.httpHeader.headerValue.existingSecret.key }}
|
||||||
|
{{- end -}}
|
||||||
|
{{- end }}
|
||||||
|
{{/*Admin Endpoint Secret*/}}
|
||||||
|
{{- with .Values.docspell.server.adminEndpoint.existingSecret }}
|
||||||
|
- name: DOCSPELL_SERVER_ADMIN__ENDPOINT_SECRET
|
||||||
|
valueFrom:
|
||||||
|
secretKeyRef:
|
||||||
|
name: {{ .name }}
|
||||||
|
key: {{ .key }}
|
||||||
|
{{- end }}
|
||||||
|
{{/*Sign Up Invitation Generation Password*/}}
|
||||||
|
{{- if eq .Values.docspell.server.backend.signup.mode "invite" -}}
|
||||||
|
{{- with .Values.docspell.server.backend.signup.newInvitePassword.existingSecret }}
|
||||||
|
- name: DOCSPELL_SERVER_BACKEND_SIGNUP_NEW__INVITE__PASSWORD
|
||||||
|
valueFrom:
|
||||||
|
secretKeyRef:
|
||||||
|
name: {{ .name }}
|
||||||
|
key: {{ .key }}
|
||||||
|
{{- end -}}
|
||||||
|
{{- end }}
|
||||||
|
{{/*PostgreSQL Password*/}}
|
||||||
|
{{- if .Values.postgresql.global.postgresql.auth.existingSecret -}}
|
||||||
|
- name: DOCSPELL_SERVER_BACKEND_JDBC_PASSWORD
|
||||||
|
valueFrom:
|
||||||
|
secretKeyRef:
|
||||||
|
name: {{ .Values.postgresql.global.postgresql.auth.existingSecret }}
|
||||||
|
key: {{ .Values.postgresql.global.postgresql.auth.secretKeys.userPasswordKey | default "password" }}
|
||||||
|
{{- end -}}
|
||||||
|
{{- end -}}
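For reference, one entry this helper should emit into the container's `env` list when the server secret comes from an existing secret (secret name and key are placeholders):

- name: DOCSPELL_SERVER_AUTH_SERVER__SECRET
  valueFrom:
    secretKeyRef:
      name: docspell-auth
      key: server-secret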
|
28
kubernetes/helm/docspell/templates/restserver/_helpers.tpl
Normal file
@@ -0,0 +1,28 @@
|
|||||||
|
{{/*Common labels*/}}
|
||||||
|
{{- define "restserver.labels" -}}
|
||||||
|
helm.sh/chart: {{ include "docspell.chart" . }}
|
||||||
|
app: {{ include "docspell.name" . }}-restserver
|
||||||
|
{{ include "restserver.selectorLabels" . }}
|
||||||
|
app.kubernetes.io/version: {{ .Values.restserver.image.tag | default .Chart.AppVersion | quote }}
|
||||||
|
version: {{ .Values.restserver.image.tag | default .Chart.AppVersion | quote }}
|
||||||
|
app.kubernetes.io/managed-by: {{ .Release.Service }}
|
||||||
|
{{- end }}
|
||||||
|
|
||||||
|
{{/*Selector labels*/}}
|
||||||
|
{{- define "restserver.selectorLabels" -}}
|
||||||
|
app.kubernetes.io/name: {{ include "docspell.name" . }}-restserver
|
||||||
|
app.kubernetes.io/instance: {{ .Release.Name }}
|
||||||
|
{{- end }}
|
||||||
|
|
||||||
|
{{/*Create restserver image name and tag used by the deployment*/}}
|
||||||
|
{{- define "restserver.image" -}}
|
||||||
|
{{- $registry := .Values.global.imageRegistry | default .Values.restserver.image.registry -}}
|
||||||
|
{{- $repository := .Values.restserver.image.repository -}}
|
||||||
|
{{- $separator := ":" -}}
|
||||||
|
{{- $tag := .Values.restserver.image.tag | default .Chart.AppVersion -}}
|
||||||
|
{{- if $registry -}}
|
||||||
|
{{- printf "%s/%s%s%s" $registry $repository $separator $tag -}}
|
||||||
|
{{- else -}}
|
||||||
|
{{- printf "%s%s%s" $repository $separator $tag -}}
|
||||||
|
{{- end -}}
|
||||||
|
{{- end -}}
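A sketch of how `restserver.image` resolves (registry and tag are placeholders): with an empty registry the result is `docspell/restserver:<Chart.AppVersion>`; with a registry set it is prefixed, for example:

restserver:
  image:
    registry: "ghcr.io"              # placeholder registry
    repository: docspell/restserver
    tag: "0.41.0"                    # placeholder tag
# expected result: ghcr.io/docspell/restserver:0.41.0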
|
27
kubernetes/helm/docspell/templates/restserver/config.yaml
Normal file
@@ -0,0 +1,27 @@
|
|||||||
|
kind: ConfigMap
|
||||||
|
apiVersion: v1
|
||||||
|
metadata:
|
||||||
|
name: {{ include "docspell.fullname" . }}-restserver
|
||||||
|
labels:
|
||||||
|
{{- include "restserver.labels" . | nindent 4 }}
|
||||||
|
data:
|
||||||
|
DOCSPELL_SERVER_APP__NAME: {{ .Values.docspell.server.appName }}
|
||||||
|
DOCSPELL_SERVER_APP__ID: {{ include "docspell.server.config.appId" . }}
|
||||||
|
DOCSPELL_SERVER_INTERNAL__URL: http://{{ include "docspell.fullname" . }}-restserver:{{ .Values.restserver.service.port }}
|
||||||
|
{{- include "docspell.server.config.logging" . | nindent 4 }}
|
||||||
|
{{- include "docspell.server.config.bind" . |nindent 4 }}
|
||||||
|
DOCSPELL_SERVER_MAX__ITEM__PAGE__SIZE: {{ .Values.docspell.server.maxItemPageSize | quote }}
|
||||||
|
DOCSPELL_SERVER_MAX__NOTE_LENGTH: {{ .Values.docspell.server.maxNoteLength | quote }}
|
||||||
|
DOCSPELL_SERVER_SHOW__CLASSIFICATION__SETTINGS: {{ .Values.docspell.server.showClassificationSettings | quote }}
|
||||||
|
{{- include "docspell.server.config.auth" . | nindent 4 }}
|
||||||
|
{{- include "docspell.server.config.donwload" . | nindent 4 }}
|
||||||
|
{{- include "docspell.server.config.openid" . | nindent 4 }}
|
||||||
|
{{- if .Values.docspell.server.integrationEndpoint.enabled -}}
|
||||||
|
{{- include "docspell.server.config.integrationEndpoint" . | nindent 4 }}
|
||||||
|
{{- end }}
|
||||||
|
{{- if .Values.docspell.fullTextSearch.enabled -}}
|
||||||
|
{{ include "docspell.config.fullTextSearch" (dict "context" . "type" "server") | nindent 4 }}
|
||||||
|
{{- end }}
|
||||||
|
DOCSPELL_SERVER_BACKEND_MAIL__DEBUG: {{ .Values.docspell.server.backend.mailDebug | quote }}
|
||||||
|
{{- include "docspell.server.config.databaseSchema" . | nindent 4 }}
|
||||||
|
{{- include "docspell.server.config.signup" . | nindent 4 }}
|
@@ -0,0 +1,59 @@
|
|||||||
|
apiVersion: apps/v1
|
||||||
|
kind: Deployment
|
||||||
|
metadata:
|
||||||
|
name: {{ include "docspell.fullname" . }}-restserver
|
||||||
|
annotations:
|
||||||
|
{{- if .Values.restserver.deployment.annotations }}
|
||||||
|
{{- toYaml .Values.restserver.deployment.annotations | nindent 4 }}
|
||||||
|
{{- end }}
|
||||||
|
labels:
|
||||||
|
{{- include "restserver.labels" . | nindent 4 }}
|
||||||
|
spec:
|
||||||
|
replicas: {{ .Values.restserver.replicaCount }}
|
||||||
|
strategy:
|
||||||
|
type: {{ .Values.restserver.strategy.type }}
|
||||||
|
{{- if eq .Values.restserver.strategy.type "RollingUpdate" }}
|
||||||
|
rollingUpdate:
|
||||||
|
maxUnavailable: {{ .Values.restserver.strategy.rollingUpdate.maxUnavailable }}
|
||||||
|
maxSurge: {{ .Values.restserver.strategy.rollingUpdate.maxSurge }}
|
||||||
|
{{- end }}
|
||||||
|
selector:
|
||||||
|
matchLabels:
|
||||||
|
{{- include "restserver.selectorLabels" . | nindent 6 }}
|
||||||
|
{{- if .Values.restserver.deployment.labels }}
|
||||||
|
{{- toYaml .Values.restserver.deployment.labels | nindent 6 }}
|
||||||
|
{{- end }}
|
||||||
|
template:
|
||||||
|
metadata:
|
||||||
|
annotations:
|
||||||
|
checksum/config: {{ include (print $.Template.BasePath "/restserver/config.yaml") . | sha256sum }}
|
||||||
|
checksum/secret: {{ include (print $.Template.BasePath "/restserver/secret.yaml") . | sha256sum }}
|
||||||
|
{{- with .Values.restserver.podAnnotations }}
|
||||||
|
{{- toYaml . | nindent 8 }}
|
||||||
|
{{- end }}
|
||||||
|
labels:
|
||||||
|
{{- include "restserver.labels" . | nindent 8 }}
|
||||||
|
{{- if .Values.restserver.deployment.labels }}
|
||||||
|
{{- toYaml .Values.restserver.deployment.labels | nindent 8 }}
|
||||||
|
{{- end }}
|
||||||
|
spec:
|
||||||
|
{{- if (or .Values.serviceAccount.create .Values.serviceAccount.name) }}
|
||||||
|
serviceAccountName: {{ include "docspell.serviceAccountName" . }}
|
||||||
|
{{- end }}
|
||||||
|
terminationGracePeriodSeconds: {{ .Values.restserver.deployment.terminationGracePeriodSeconds }}
|
||||||
|
containers:
|
||||||
|
- name: restserver
|
||||||
|
image: "{{ include "restserver.image" . }}"
|
||||||
|
imagePullPolicy: {{ .Values.restserver.image.pullPolicy }}
|
||||||
|
ports:
|
||||||
|
- containerPort: {{ .Values.restserver.service.port }}
|
||||||
|
name: http
|
||||||
|
env:
|
||||||
|
{{- include "docspell.server.secrets.existingSecrets" . | nindent 10 }}
|
||||||
|
envFrom:
|
||||||
|
- configMapRef:
|
||||||
|
name: {{ include "docspell.fullname" . }}-restserver
|
||||||
|
- secretRef:
|
||||||
|
name: {{ include "docspell.fullname" . }}-restserver-secret
|
||||||
|
resources:
|
||||||
|
{{- toYaml .Values.restserver.resources | nindent 12 }}
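A hedged values sketch of the knobs this deployment reads, e.g. to pin the image and annotate the pod (the tag and the annotation key are placeholders):

restserver:
  image:
    tag: "0.41.0"                    # placeholder tag
  podAnnotations:
    prometheus.io/scrape: "true"     # placeholder annotation
  deployment:
    terminationGracePeriodSeconds: 60
  resources:
    limits:
      memory: 1Gi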
|
34
kubernetes/helm/docspell/templates/restserver/secret.yaml
Normal file
@@ -0,0 +1,34 @@
|
|||||||
|
apiVersion: v1
|
||||||
|
kind: Secret
|
||||||
|
metadata:
|
||||||
|
name: {{ include "docspell.fullname" . }}-restserver-secret
|
||||||
|
labels:
|
||||||
|
{{- include "restserver.labels" . | nindent 4 }}
|
||||||
|
type: Opaque
|
||||||
|
stringData:
|
||||||
|
assertions:
|
||||||
|
{{- if gt .Values.restserver.replicaCount 1.0 }}
|
||||||
|
{{- if not .Values.docspell.server.auth.serverSecret -}}
|
||||||
|
{{- fail "If multiple replicas are running of the rest server, the server secret has to be fixed." -}}
|
||||||
|
{{- else if not (or .Values.docspell.server.auth.serverSecret.existingSecret .Values.docspell.server.auth.serverSecret.value) }}
|
||||||
|
{{- end -}}
|
||||||
|
{{- if and .Values.docspell.server.adminEndpoint.enabled (and (not .Values.docspell.server.adminEndpoint.existingSecret) (not .Values.docspell.server.adminEndpoint.secret)) -}}
|
||||||
|
{{- fail "When enabling the administration endpoint, a value for authentication has the supplied." -}}
|
||||||
|
{{- end -}}
|
||||||
|
{{- end -}}
|
||||||
|
{{- range $entry := .Values.docspell.server.openid -}}
|
||||||
|
{{- if and (not $entry.provider.userUrl) (not $entry.provider.signKey) -}}
|
||||||
|
{{- fail (printf "Failure for %s, if no userUrl is set a signKey has to be specified." $entry.provider.providerId) -}}
|
||||||
|
{{- end -}}
|
||||||
|
{{- end -}}
|
||||||
|
{{- if eq .Values.docspell.server.backend.signup.mode "invite" -}}
|
||||||
|
{{- if not .Values.docspell.server.backend.signup.newInvitePassword -}}
|
||||||
|
{{- fail "Invite password has to be set, when using signup mode 'invite'." -}}
|
||||||
|
{{- end -}}
|
||||||
|
{{- end -}}
|
||||||
|
{{- include "docspell.server.secrets.auth" . | nindent 4 }}
|
||||||
|
{{- include "docspell.server.secrets.openid" . | nindent 4 }}
|
||||||
|
{{- include "docspell.server.secrets.integrationEndpoint" . | nindent 4 }}
|
||||||
|
{{- include "docspell.server.secrets.adminEndpoint" . | nindent 4 }}
|
||||||
|
{{- include "docspell.secrets.JDBC" (dict "context" . "type" "server") | nindent 4 -}}
|
||||||
|
{{- include "docspell.server.secrets.signup" . | nindent 4 -}}
|
20
kubernetes/helm/docspell/templates/restserver/service.yaml
Normal file
@@ -0,0 +1,20 @@
|
|||||||
|
apiVersion: v1
|
||||||
|
kind: Service
|
||||||
|
metadata:
|
||||||
|
name: {{ include "docspell.fullname" . }}-restserver
|
||||||
|
labels:
|
||||||
|
{{- include "restserver.labels" . | nindent 4 }}
|
||||||
|
{{- if .Values.restserver.service.labels }}
|
||||||
|
{{- toYaml .Values.restserver.service.labels | nindent 4 }}
|
||||||
|
{{- end }}
|
||||||
|
annotations:
|
||||||
|
{{- toYaml .Values.restserver.service.annotations | nindent 4 }}
|
||||||
|
spec:
|
||||||
|
type: {{ .Values.restserver.service.type }}
|
||||||
|
ports:
|
||||||
|
- port: {{ .Values.restserver.service.port }}
|
||||||
|
targetPort: {{ .Values.restserver.service.targetPort | default .Values.restserver.service.port }}
|
||||||
|
protocol: TCP
|
||||||
|
name: http
|
||||||
|
selector:
|
||||||
|
{{- include "restserver.selectorLabels" . | nindent 6 -}}
|
20
kubernetes/helm/docspell/templates/serviceaccount.yaml
Normal file
@@ -0,0 +1,20 @@
|
|||||||
|
{{- if .Values.serviceAccount.create }}
|
||||||
|
apiVersion: v1
|
||||||
|
kind: ServiceAccount
|
||||||
|
metadata:
|
||||||
|
name: {{ include "docspell.serviceAccountName" . }}
|
||||||
|
namespace: {{ .Release.Namespace | quote }}
|
||||||
|
labels:
|
||||||
|
{{- include "docspell.labels" . | nindent 4 }}
|
||||||
|
{{- with .Values.serviceAccount.labels }}
|
||||||
|
{{- . | toYaml | nindent 4 }}
|
||||||
|
{{- end }}
|
||||||
|
{{- with .Values.serviceAccount.annotations }}
|
||||||
|
{{- . | toYaml | nindent 4 }}
|
||||||
|
{{- end }}
|
||||||
|
automountServiceAccountToken: {{ .Values.serviceAccount.automountServiceAccountToken }}
|
||||||
|
{{- with .Values.serviceAccount.imagePullSecrets }}
|
||||||
|
imagePullSecrets:
|
||||||
|
{{- . | toYaml | nindent 2 }}
|
||||||
|
{{- end }}
|
||||||
|
{{- end }}
|
43
kubernetes/helm/docspell/templates/solr/_helpers.tpl
Normal file
@@ -0,0 +1,43 @@
|
|||||||
|
{{/*Common labels*/}}
|
||||||
|
{{- define "solr.labels" -}}
|
||||||
|
helm.sh/chart: {{ include "docspell.chart" . }}
|
||||||
|
app: {{ include "docspell.name" . }}-solr
|
||||||
|
{{ include "solr.selectorLabels" . }}
|
||||||
|
app.kubernetes.io/version: {{ .Values.solr.image.tag | quote }}
|
||||||
|
version: {{ .Values.solr.image.tag | quote }}
|
||||||
|
app.kubernetes.io/managed-by: {{ .Release.Service }}
|
||||||
|
{{- end }}
|
||||||
|
|
||||||
|
{{/*Selector labels*/}}
|
||||||
|
{{- define "solr.selectorLabels" -}}
|
||||||
|
app.kubernetes.io/name: {{ include "docspell.name" . }}-solr
|
||||||
|
app.kubernetes.io/instance: {{ .Release.Name }}
|
||||||
|
{{- end }}
|
||||||
|
|
||||||
|
{{/*Create solr image name and tag used by the deployment*/}}
|
||||||
|
{{- define "solr.image" -}}
|
||||||
|
{{- $registry := .Values.global.imageRegistry | default .Values.solr.image.registry -}}
|
||||||
|
{{- $repository := .Values.solr.image.repository -}}
|
||||||
|
{{- $separator := ":" -}}
|
||||||
|
{{- $tag := .Values.solr.image.tag | default .Chart.AppVersion -}}
|
||||||
|
{{- if $registry -}}
|
||||||
|
{{- printf "%s/%s%s%s" $registry $repository $separator $tag -}}
|
||||||
|
{{- else -}}
|
||||||
|
{{- printf "%s%s%s" $repository $separator $tag -}}
|
||||||
|
{{- end -}}
|
||||||
|
{{- end -}}
|
||||||
|
|
||||||
|
{{/*Connection URL*/}}
|
||||||
|
{{- define "solr.url" -}}
|
||||||
|
{{- $port := .Values.solr.service.port | toString -}}
|
||||||
|
{{- $service := printf "%s-solr" (include "docspell.fullname" .) -}}
|
||||||
|
{{- printf "http://%s:%s/solr/docspell" $service $port -}}
|
||||||
|
{{- end }}
|
||||||
|
|
||||||
|
{{/*Storage Class*/}}
|
||||||
|
{{- define "solr.persistence.storageClass" -}}
|
||||||
|
{{- $storageClass := .Values.solr.persistence.storageClass | default .Values.global.storageClass -}}
|
||||||
|
{{- if $storageClass -}}
|
||||||
|
storageClassName: {{ $storageClass | quote }}
|
||||||
|
{{- end -}}
|
||||||
|
{{- end -}}
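As an illustration, `solr.url` for a release named `docspell` and a solr service port of 8983 (both the release name and the port default are assumptions here) should resolve to:

http://docspell-solr:8983/solr/docspell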
|
21
kubernetes/helm/docspell/templates/solr/pvc.yaml
Normal file
@@ -0,0 +1,21 @@
|
|||||||
|
{{- if and .Values.solr.enabled .Values.solr.persistence.enabled -}}
|
||||||
|
apiVersion: v1
|
||||||
|
kind: PersistentVolumeClaim
|
||||||
|
metadata:
|
||||||
|
name: {{ .Values.solr.persistence.claimName }}
|
||||||
|
namespace: {{ $.Release.Namespace }}
|
||||||
|
annotations:
|
||||||
|
{{ .Values.solr.persistence.annotations | toYaml | indent 4}}
|
||||||
|
spec:
|
||||||
|
accessModes:
|
||||||
|
{{- .Values.solr.persistence.accessModes | toYaml | nindent 4 }}
|
||||||
|
volumeMode: Filesystem
|
||||||
|
{{- include "solr.persistence.storageClass" . | nindent 2 }}
|
||||||
|
{{- with .Values.solr.persistence.volumeName }}
|
||||||
|
volumeName: {{ . }}
|
||||||
|
{{- end }}
|
||||||
|
resources:
|
||||||
|
requests:
|
||||||
|
storage: {{ .Values.solr.persistence.size }}
|
||||||
|
{{- end }}
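A values sketch for the persistence settings this claim consumes (claim name, size and storage class are placeholders; storageClass falls back to global.storageClass):

solr:
  persistence:
    enabled: true
    claimName: docspell-solr-data
    size: 8Gi
    accessModes:
      - ReadWriteOnce
    # storageClass: fast-ssd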
|
||||||
|
|
22
kubernetes/helm/docspell/templates/solr/service.yaml
Normal file
@@ -0,0 +1,22 @@
|
|||||||
|
{{- if .Values.solr.enabled -}}
|
||||||
|
apiVersion: v1
|
||||||
|
kind: Service
|
||||||
|
metadata:
|
||||||
|
name: {{ include "docspell.fullname" . }}-solr
|
||||||
|
labels:
|
||||||
|
{{- include "solr.labels" . | nindent 4 }}
|
||||||
|
{{- if .Values.solr.service.labels }}
|
||||||
|
{{- toYaml .Values.solr.service.labels | nindent 4 }}
|
||||||
|
{{- end }}
|
||||||
|
annotations:
|
||||||
|
{{- toYaml .Values.solr.service.annotations | nindent 4 }}
|
||||||
|
spec:
|
||||||
|
type: {{ .Values.solr.service.type }}
|
||||||
|
ports:
|
||||||
|
- port: {{ .Values.solr.service.port }}
|
||||||
|
targetPort: {{ .Values.solr.service.targetPort | default .Values.solr.service.port }}
|
||||||
|
protocol: TCP
|
||||||
|
name: solr
|
||||||
|
selector:
|
||||||
|
{{- include "solr.selectorLabels" . | nindent 4 }}
|
||||||
|
{{- end -}}
|
98
kubernetes/helm/docspell/templates/solr/statefulset.yaml
Normal file
@@ -0,0 +1,98 @@
|
|||||||
|
{{- if .Values.solr.enabled -}}
|
||||||
|
apiVersion: apps/v1
|
||||||
|
kind: StatefulSet
|
||||||
|
metadata:
|
||||||
|
name: {{ include "docspell.fullname" . }}-solr
|
||||||
|
annotations:
|
||||||
|
{{- if .Values.solr.statefulSet.annotations }}
|
||||||
|
{{- toYaml .Values.solr.statefulSet.annotations | nindent 4 }}
|
||||||
|
{{- end }}
|
||||||
|
labels:
|
||||||
|
{{- include "solr.labels" . | nindent 4 }}
|
||||||
|
spec:
|
||||||
|
replicas: 1
|
||||||
|
selector:
|
||||||
|
matchLabels:
|
||||||
|
{{- include "solr.selectorLabels" . | nindent 6 }}
|
||||||
|
{{- if .Values.solr.statefulSet.labels }}
|
||||||
|
{{- toYaml .Values.solr.statefulSet.labels | nindent 6 }}
|
||||||
|
{{- end }}
|
||||||
|
serviceName: {{ include "docspell.fullname" . }}-solr
|
||||||
|
template:
|
||||||
|
metadata:
|
||||||
|
annotations:
|
||||||
|
{{- with .Values.solr.podAnnotations }}
|
||||||
|
{{- toYaml . | nindent 8 }}
|
||||||
|
{{- end }}
|
||||||
|
labels:
|
||||||
|
{{- include "solr.labels" . | nindent 8 }}
|
||||||
|
{{- if .Values.solr.statefulSet.labels }}
|
||||||
|
{{- toYaml .Values.solr.statefulSet.labels | nindent 8 }}
|
||||||
|
{{- end }}
|
||||||
|
app: solr
|
||||||
|
spec:
|
||||||
|
{{- if (or .Values.serviceAccount.create .Values.serviceAccount.name) }}
|
||||||
|
serviceAccountName: {{ include "docspell.serviceAccountName" . }}
|
||||||
|
{{- end }}
|
||||||
|
initContainers:
|
||||||
|
- name: solr-fix-permissions
|
||||||
|
image: busybox
|
||||||
|
command:
|
||||||
|
- sh
|
||||||
|
- -c
|
||||||
|
- chown -R 8983:8983 /var/solr
|
||||||
|
volumeMounts:
|
||||||
|
- name: solr-data
|
||||||
|
mountPath: /var/solr
|
||||||
|
resources:
|
||||||
|
{{- toYaml .Values.solr.initContainers.resources | nindent 12 }}
|
||||||
|
terminationGracePeriodSeconds: {{ .Values.solr.statefulSet.terminationGracePeriodSeconds }}
|
||||||
|
containers:
|
||||||
|
- name: solr
|
||||||
|
image: "{{ include "solr.image" . }}"
|
||||||
|
imagePullPolicy: {{ .Values.solr.image.pullPolicy }}
|
||||||
|
env:
|
||||||
|
- name: SOLR_OPTS
|
||||||
|
value: -Dsolr.modules=analysis-extras
|
||||||
|
command:
|
||||||
|
- docker-entrypoint.sh
|
||||||
|
- solr-precreate
|
||||||
|
- docspell
|
||||||
|
ports:
|
||||||
|
- containerPort: {{ .Values.solr.service.port }}
|
||||||
|
name: solr
|
||||||
|
{{- if .Values.solr.livenessProbe.enabled }}
|
||||||
|
livenessProbe:
|
||||||
|
{{- toYaml (omit .Values.solr.livenessProbe "enabled") | nindent 10 }}
|
||||||
|
{{- end }}
|
||||||
|
{{- if .Values.solr.readinessProbe.enabled }}
|
||||||
|
readinessProbe:
|
||||||
|
{{- toYaml (omit .Values.solr.readinessProbe "enabled") | nindent 10 }}
|
||||||
|
{{- end }}
|
||||||
|
resources:
|
||||||
|
{{- toYaml .Values.solr.resources | nindent 10 }}
|
||||||
|
volumeMounts:
|
||||||
|
- name: solr-data
|
||||||
|
mountPath: /var/solr
|
||||||
|
{{- with .Values.global.hostAliases }}
|
||||||
|
hostAliases:
|
||||||
|
{{- toYaml . | nindent 6 }}
{{- end }}
|
||||||
|
{{- with .Values.solr.nodeSelector }}
|
||||||
|
nodeSelector:
|
||||||
|
{{- toYaml . | nindent 6 }}
|
||||||
|
{{- end }}
|
||||||
|
{{- with .Values.solr.affinity }}
|
||||||
|
affinity:
|
||||||
|
{{- toYaml . | nindent 6 }}
|
||||||
|
{{- end }}
|
||||||
|
|
||||||
|
volumes:
|
||||||
|
{{- if .Values.solr.persistence.enabled }}
|
||||||
|
- name: solr-data
|
||||||
|
persistentVolumeClaim:
|
||||||
|
claimName: {{ .Values.solr.persistence.claimName }}
|
||||||
|
{{- else }}
|
||||||
|
- name: solr-data
|
||||||
|
emptyDir: {}
|
||||||
|
{{- end }}
|
||||||
|
{{- end -}}
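The probes above are only rendered when enabled, and every key except `enabled` is passed through verbatim. A hedged sketch of a liveness probe configuration (the HTTP path is an assumption, not taken from the chart):

solr:
  livenessProbe:
    enabled: true
    httpGet:
      path: /solr/admin/info/system   # assumed Solr health endpoint
      port: 8983
    initialDelaySeconds: 30
    periodSeconds: 10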
|
608
kubernetes/helm/docspell/values.yaml
Normal file
@@ -0,0 +1,608 @@
|
|||||||
|
# Default values for docspell.
|
||||||
|
# This is a YAML-formatted file.
|
||||||
|
# Declare variables to be passed to your templates.
|
||||||
|
|
||||||
|
## @section Global
|
||||||
|
#
|
||||||
|
## @param global.imageRegistry global image registry override
|
||||||
|
## @param global.imagePullSecrets global image pull secrets override; can be extended by `imagePullSecrets`
|
||||||
|
## @param global.storageClass global storage class override
|
||||||
|
## @param global.hostAliases global hostAliases which will be added to the pod's hosts files
|
||||||
|
global:
|
||||||
|
imageRegistry: ""
|
||||||
|
## E.g.
|
||||||
|
## imagePullSecrets:
|
||||||
|
## - myRegistryKeySecretName
|
||||||
|
##
|
||||||
|
imagePullSecrets: []
|
||||||
|
storageClass: ""
|
||||||
|
hostAliases: []
|
||||||
|
# - ip: 192.168.137.2
|
||||||
|
# hostnames:
|
||||||
|
# - example.com
|
||||||
|
|
||||||
|
## @section Docspell
|
||||||
|
|
||||||
|
docspell:
|
||||||
|
## @param docspell.fullTextSearch.enabled The full-text search feature can be disabled and can be re-enabled at any time
|
||||||
|
## @param docspell.fullTextSearch.backend Which backend to use, either solr or postgresql
|
||||||
|
## @param docspell.fullTextSearch.solr.commitWithin Used to tell solr when to commit the data
|
||||||
|
## @param docspell.fullTextSearch.solr.logVerbose If true, logs request and response bodies
|
||||||
|
## @param docspell.fullTextSearch.solr.defType The defType parameter to lucene that defines the parser to use. (https://solr.apache.org/guide/8_4/query-syntax-and-parsing.html#query-syntax-and-parsing)
|
||||||
|
## @param docspell.fullTextSearch.solr.qOp The default combiner for tokens (AND / OR)
|
||||||
|
fullTextSearch:
|
||||||
|
enabled: true
|
||||||
|
solr:
|
||||||
|
commitWithin: 1000
|
||||||
|
logVerbose: false
|
||||||
|
defType: lucene
|
||||||
|
qOp: OR
|
||||||
|
## @param docspell.server.appName Name of the application shown in the top right corner of the web application
|
||||||
|
## @param docspell.server.appId Id of the node
|
||||||
|
## @param docspell.server.maxItemPageSize Hard limit of batch returned items for search
|
||||||
|
## @param docspell.server.maxNoteLength Number of characters to return for each item's notes when searching
|
||||||
|
## @param docspell.server.showClassificationSettings Whether the classification form in the collective settings is displayed or not
|
||||||
|
server:
|
||||||
|
appName: Docspell
|
||||||
|
appId:
|
||||||
|
maxItemPageSize: 200
|
||||||
|
maxNoteLength: 180
|
||||||
|
showClassificationSettings: true
|
||||||
|
|
||||||
|
## @param docspell.server.logging.format Format of log messages. Can be Json, Logfmt, Fancy or Plain
|
||||||
|
## @param docspell.server.logging.minimumLevel Minimum level of the log. From lowest to highest: Trace, Debug, Info, Warn, Error
|
||||||
|
logging:
|
||||||
|
format: "fancy"
|
||||||
|
minimumLevel: "Warn"
|
||||||
|
|
||||||
|
## @param docspell.server.bind.address The address the server binds to. Should be set to `0.0.0.0` as otherwise it'll reject connections from the ingress
|
||||||
|
## @param docspell.server.bind.port The port the server binds to. Make sure to use the same as in the port for the service and ingress
|
||||||
|
bind:
|
||||||
|
address: 0.0.0.0
|
||||||
|
port: 7880
|
||||||
|
## @param docspell.server.auth.serverSecret.value Secret to sign the authenticator tokens. If empty, one will be generated
|
||||||
|
## @param docspell.server.auth.serverSecret.existingSecret.name The name of an existing Kubernetes secret that contains the server secret
|
||||||
|
## @param docspell.server.auth.serverSecret.existingSecret.key The key inside the existing Kubernetes secret that contains the server secret
|
||||||
|
## @param docspell.server.auth.sessionValid How long an authentication token is valid
|
||||||
|
## @param docspell.server.auth.onAccountSourceConflict What to do when an account exists both locally and at an external provider. Can be: fail, convert
|
||||||
|
## @param docspell.server.auth.rememberMe.enabled Enable/disable the remember me function
|
||||||
|
## @param docspell.server.auth.rememberMe.valid How long the remember me cookie/token is valid
|
||||||
|
auth:
|
||||||
|
serverSecret:
|
||||||
|
# value: asdf
|
||||||
|
# existingSecret:
|
||||||
|
# name: "my-existing-secret"
|
||||||
|
# key: "key-inside-secret"
|
||||||
|
sessionValid: "5 minutes"
|
||||||
|
onAccountSourceConflict: fail
|
||||||
|
rememberMe:
|
||||||
|
enabled: true
|
||||||
|
valid: "30 days"
|
||||||
|
## @param docspell.server.downloadAll.maxFiles How many files to allow in "download as zip"
|
||||||
|
## @param docspell.server.downloadAll.maxSize The maximum (uncompressed) size of the zip file contents.
|
||||||
|
downloadAll:
|
||||||
|
maxFiles: 500
|
||||||
|
maxSize: 1400M
|
||||||
|
## @param docspell.server.openid OpenID Connect (oidc) or OAuth2 authentication providers. Only the "Authorization Code Flow" is supported
|
||||||
|
openid:
|
||||||
|
- display: Keycloak
|
||||||
|
enabled: false
|
||||||
|
provider:
|
||||||
|
providerId: keycloak
|
||||||
|
clientId: docspell
|
||||||
|
clientSecret: example-secret-439e-bf06-911e4cdd56a6
|
||||||
|
authorizeUrl: http://localhost:8080/auth/realms/home/protocol/openid-connect/auth
|
||||||
|
tokenUrl: http://localhost:8080/auth/realms/home/protocol/openid-connect/token
|
||||||
|
scope: openid profile email
|
||||||
|
# User URL is not used when signature key is set
|
||||||
|
# userUrl: http://localhost:8080/auth/realms/home/protocol/openid-connect/userinfo
|
||||||
|
logoutUrl: http://localhost:8080/auth/realms/home/protocol/openid-connect/logout
|
||||||
|
signKey: b64:anVzdC1hLXRlc3Q=
|
||||||
|
sigAlgo: RS512
|
||||||
|
# existingSecret:
|
||||||
|
# name: "my-existing-secret"
|
||||||
|
# clientIdKey: clientId
|
||||||
|
# clientSecretKey: clientSecret
|
||||||
|
# signKeyKey: signKey
|
||||||
|
# The collective of the user is given in the access token as property `docspell_collective`
|
||||||
|
collectiveKey: "lookup:docspell_collective"
|
||||||
|
# The username to use for the docspell account
|
||||||
|
userKey: preferred_username
|
||||||
|
## @param docspell.server.oidcAutoRedirect When exactly one OIDC/OAuth provider is configured, then the webapp automatically redirects to its authentication page skipping the docspell login page
|
||||||
|
oidcAutoRedirect: true
|
||||||
|
## @param docspell.server.integrationEndpoint.enabled Enable endpoint to upload files to any collective
|
||||||
|
## @param docspell.server.integrationEndpoint.priority Priority to use when submitting files through this endpoint
|
||||||
|
## @param docspell.server.integrationEndpoint.sourceName The name used for the item "source" property when uploaded through this endpoint
|
||||||
|
## @param docspell.server.integrationEndpoint.allowedIps.enabled Enable ip-allow-access-list
|
||||||
|
## @param docspell.server.integrationEndpoint.allowedIps.ips List of ips which should be added to the access list
|
||||||
|
integrationEndpoint:
|
||||||
|
enabled: true
|
||||||
|
priority: low
|
||||||
|
sourceName: integration
|
||||||
|
allowedIps:
|
||||||
|
enabled: false
|
||||||
|
ips:
|
||||||
|
# IP addresses may be specified as simple globs: a part marked as '*' matches any octet, like in `192.168.*.*`
|
||||||
|
- 127.0.0.1
|
||||||
|
## @param docspell.server.integrationEndpoint.httpBasic.enabled Whether integration endpoint requests are expected to use http basic auth when uploading files
|
||||||
|
## @param docspell.server.integrationEndpoint.httpBasic.credentials.user The username for httpBasic authentication
|
||||||
|
## @param docspell.server.integrationEndpoint.httpBasic.credentials.password The password for the httpBasic authentication
|
||||||
|
## @param docspell.server.integrationEndpoint.httpBasic.existingSecret.name Name of an existing Kubernetes secret that contains the httpBasic credentials
|
||||||
|
## @param docspell.server.integrationEndpoint.httpBasic.existingSecret.usernameKey The key inside the existing Kubernetes secret that contains the username for httpBasic
|
||||||
|
## @param docspell.server.integrationEndpoint.httpBasic.existingSecret.passwordKey The key inside the existing Kubernetes secret that contains the password for httpBasic
|
||||||
|
httpBasic:
|
||||||
|
enabled: false
|
||||||
|
realm: "Docspell Integration"
|
||||||
|
credentials:
|
||||||
|
# username: "docspell-int"
|
||||||
|
# password: "docspell-int"
|
||||||
|
# existingSecret:
|
||||||
|
# name: "http-basic-secret-name"
|
||||||
|
# usernameKey: "username-key-inside-secret"
|
||||||
|
# passwordKey: "password-key-inside-secret"
|
||||||
|
## @param docspell.server.integrationEndpoint.httpHeader.enabled Whether integration endpoint requests are expected to supply some specific header when uploading files
|
||||||
|
## @param docspell.server.integrationEndpoint.httpHeader.headerName The name of the header that has to be included in the integration endpoint request
|
||||||
|
## @param docspell.server.integrationEndpoint.httpHeader.headerValue.value The header value that is expected to be included in the integration endpoint request
|
||||||
|
## @param docspell.server.integrationEndpoint.httpHeader.headerValue.existingSecret.name The name of an existing Kubernetes secret that contains the value expected to be included in the integration endpoint request
|
||||||
|
## @param docspell.server.integrationEndpoint.httpHeader.headerValue.existingSecret.key The key inside of an existing Kubernetes secret that contains the value expected to be included in the integration endpoint request
|
||||||
|
httpHeader:
|
||||||
|
enabled: false
|
||||||
|
headerName: "Docspell-Integration"
|
||||||
|
headerValue:
|
||||||
|
# value: "SomeSecret"
|
||||||
|
# existingSecret:
|
||||||
|
# name: "my-existing-secret"
|
||||||
|
# key: "header-value-key-inside-secret"
|
||||||
|
## @param docspell.server.adminEndpoint.enabled Whether to enable the special administration endpoint. A secret value or existing secret containing the value has to be supplied when enabled
|
||||||
|
## @param docspell.server.adminEndpoint.secret.value Value for the administration endpoint
|
||||||
|
## @param docspell.server.adminEndpoint.existingSecret.name The name of an existing Kubernetes secret that contains the value for the admin endpoint
|
||||||
|
## @param docspell.server.adminEndpoint.existingSecret.key The key inside of an existing Kubernetes secret that contains the value for the admin endpoint
|
||||||
|
adminEndpoint:
|
||||||
|
enabled: false
|
||||||
|
# secret:
|
||||||
|
# value: "test"
|
||||||
|
# existingSecret:
|
||||||
|
# name: "my-existing-secret"
|
||||||
|
# key: "admin-key-inside-secret"
|
||||||
|
|
||||||
|
## @param docspell.server.backend.mailDebug Enable or disable debugging for e-mail related functionality
|
||||||
|
backend:
|
||||||
|
mailDebug: false
|
||||||
|
## @param docspell.server.backend.databaseSchema.runMainMigrations Whether to run main database migrations
|
||||||
|
## @param docspell.server.backend.databaseSchema.runFixupMigrations Whether to run the fixup migrations
|
||||||
|
## @param docspell.server.backend.databaseSchema.repairSchema Use with care. This repairs all migrations in the database by updating their checksums and removing failed migrations
|
||||||
|
databaseSchema:
|
||||||
|
runMainMigrations: true
|
||||||
|
runFixupMigrations: true
|
||||||
|
repairSchema: false
|
||||||
|
## @param docspell.server.backend.signup.mode The mode defines if new users can signup or not (open, invite, closed)
|
||||||
|
## @param docspell.server.backend.signup.newInvitePassword.value If mode is 'invite', a password must be provided to generate invitation keys
|
||||||
|
## @param docspell.server.backend.signup.newInvitePassword.existingSecret.name The name of an existing Kubernetes secret that contains the invitation generation password
|
||||||
|
## @param docspell.server.backend.signup.newInvitePassword.existingSecret.key The key inside of an existing Kubernetes secret that contains the invitation generation password
|
||||||
|
## @param docspell.server.backend.signup.inviteTime If mode is 'invite', this is the period an invitation token is considered valid
|
||||||
|
signup:
|
||||||
|
mode: open
|
||||||
|
newInvitePassword:
|
||||||
|
# value: asdf
|
||||||
|
# existingSecret:
|
||||||
|
# name: "my-existing-secret"
|
||||||
|
# key: "invite-password-key"
|
||||||
|
inviteTime: "3 days"
|
||||||
|
## @param docspell.joex.appId Id of the node
|
||||||
|
## @param docspell.joex.mailDebug Enable or disable debugging for e-mail related functionality
|
||||||
|
joex:
|
||||||
|
appId:
|
||||||
|
mailDebug: false
|
||||||
|
## @param docspell.joex.bind.address The address joex binds to. Should be set to `0.0.0.0`, as otherwise it'll refuse connections
|
||||||
|
## @param docspell.joex.bind.port The port joex binds to. Make sure to set the same port for the service
|
||||||
|
bind:
|
||||||
|
address: 0.0.0.0
|
||||||
|
port: 7878
|
||||||
|
## @param docspell.joex.logging.format Format of log messages. Can be Json, Logfmt, Fancy or Plain
|
||||||
|
## @param docspell.joex.logging.minimumLevel Minimum level of the log. From lowest to highest: Trace, Debug, Info, Warn, Error
|
||||||
|
logging:
|
||||||
|
format: "fancy"
|
||||||
|
minimumLevel: "Warn"
|
||||||
|
## @param docspell.joex.databaseSchema.runMainMigrations Whether to run main database migrations
|
||||||
|
## @param docspell.joex.databaseSchema.runFixupMigrations Whether to run the fixup migrations
|
||||||
|
## @param docspell.joex.databaseSchema.repairSchema Use with care. This repairs all migrations in the database by updating their checksums and removing failed migrations
|
||||||
|
databaseSchema:
|
||||||
|
runMainMigrations: true
|
||||||
|
runFixupMigrations: true
|
||||||
|
repairSchema: false
|
||||||
|
## @param docspell.joex.scheduler.name Each scheduler needs a unique name. This defaults to the node name
|
||||||
|
## @param docspell.joex.scheduler.poolSize Number of jobs processed in parallel
|
||||||
|
## @param docspell.joex.scheduler.countingScheme A counting scheme determines the ratio of how high- and low-prio jobs are run
|
||||||
|
## @param docspell.joex.scheduler.retries How often a failed job should be retried until it enters failed state
|
||||||
|
## @param docspell.joex.scheduler.retryDelay The delay until the next try is performed for a failed job
|
||||||
|
## @param docspell.joex.scheduler.logBufferSize The queue size of log statements from a job
|
||||||
|
## @param docspell.joex.scheduler.wakeupPeriod If no job is left in the queue, the scheduler will wait until a notify is requested
|
||||||
|
scheduler:
|
||||||
|
name:
|
||||||
|
poolSize: 1
|
||||||
|
countingScheme: "4,1"
|
||||||
|
retries: 2
|
||||||
|
retryDelay: "1 minute"
|
||||||
|
logBufferSize: 500
|
||||||
|
wakeupPeriod: "30 minutes"
|
||||||
|
## @param docspell.joex.periodicScheduler.name Each scheduler needs a unique name. This defaults to the node name
|
||||||
|
## @param docspell.joex.periodicScheduler.wakeupPeriod A fallback to start looking for due periodic tasks regularly
|
||||||
|
periodicScheduler:
|
||||||
|
name:
|
||||||
|
wakeupPeriod: "10 minutes"
|
||||||
|
## @param docspell.joex.userTasks.scanMailbox.maxFolders A limit of how many folders to scan through. If a user configures more than this, only up to this many folders are scanned and a warning is logged
|
||||||
|
## @param docspell.joex.userTasks.scanMailbox.mailChunkSize How many mails (headers only) to retrieve in one chunk
|
||||||
|
## @param docspell.joex.userTasks.scanMailbox.maxMails A limit on how many mails to process in one job run. This is meant to avoid too heavy resource allocation to one user/collective
|
||||||
|
userTasks:
|
||||||
|
scanMailbox:
|
||||||
|
maxFolders: 50
|
||||||
|
mailChunkSize: 50
|
||||||
|
maxMails: 500
|
||||||
|
## @param docspell.joex.houseKeeping.schedule When the house keeping tasks execute. Default is to run every week
|
||||||
|
## @param docspell.joex.houseKeeping.cleanupInvites.enabled Whether to remove invitation keys that have been created but not used
|
||||||
|
## @param docspell.joex.houseKeeping.cleanupInvites.olderThan The minimum age of invites to be deleted
|
||||||
|
## @param docspell.joex.houseKeeping.cleanupRememberMe.enabled Whether to remove expired remember-me tokens
|
||||||
|
## @param docspell.joex.houseKeeping.cleanupRememberMe.olderThan The minimum age of tokens to be deleted
|
||||||
|
## @param docspell.joex.houseKeeping.cleanupJobs.enabled Whether to delete old job log files. Logs are already stored in the database
|
||||||
|
## @param docspell.joex.houseKeeping.cleanupJobs.olderThan The minimum age of jobs to delete
|
||||||
|
## @param docspell.joex.houseKeeping.cleanupJobs.deleteBatch How many jobs are deleted in one transaction
|
||||||
|
## @param docspell.joex.houseKeeping.cleanupDownloads.enabled Whether to delete cached zip files for past downloads
|
||||||
|
## @param docspell.joex.houseKeeping.cleanupDownloads.olderThan The minimum age of a download file to be deleted
|
||||||
|
## @param docspell.joex.houseKeeping.cleanupNodes.enabled Whether to delete node entries that are not reachable anymore
|
||||||
|
## @param docspell.joex.houseKeeping.cleanupNodes.minNotFound How often the node must be unreachable, before it's removed
|
||||||
|
## @param docspell.joex.houseKeeping.integrityCheck.enabled Whether to check all files against their checksum
|
||||||
|
houseKeeping:
|
||||||
|
schedule: "Sun *-*-* 00:00:00 UTC"
|
||||||
|
cleanupInvites:
|
||||||
|
enabled: true
|
||||||
|
olderThan: "30 days"
|
||||||
|
cleanupRememberMe:
|
||||||
|
enabled: true
|
||||||
|
olderThan: "30 days"
|
||||||
|
cleanupJobs:
|
||||||
|
enabled: true
|
||||||
|
olderThan: "30 days"
|
||||||
|
deleteBatch: 100
|
||||||
|
cleanupDownloads:
|
||||||
|
enabled: true
|
||||||
|
olderThan: "14 days"
|
||||||
|
cleanupNodes:
|
||||||
|
enabled: true
|
||||||
|
minNotFound: 2
|
||||||
|
integrityCheck:
|
||||||
|
enabled: true
|
||||||
|
## @param docspell.joex.updateCheck.enabled Whether to periodically check for new releases of docspell
|
||||||
|
## @param docspell.joex.updateCheck.testRun Sends the mail without checking the latest release
|
||||||
|
## @param docspell.joex.updateCheck.schedule When the update check should execute. Default is to run every week
|
||||||
|
## @param docspell.joex.updateCheck.senderAccount An account id in the form of `collective/user`. This user account must have at least one valid SMTP setting, which is used to send the mail
|
||||||
|
## @param docspell.joex.updateCheck.smtpId The SMTP connection id that should be used for sending the mail
|
||||||
|
## @param docspell.joex.updateCheck.recipients A list of recipient e-mail addresses
|
||||||
|
## @param docspell.joex.updateCheck.subject The subject of the mail. It supports the same variables as the body
|
||||||
|
## @param docspell.joex.updateCheck.body The body of the mail. Subject and body can contain these variables which are replaced: latestVersion, currentVersion, releasedAt. The body is processed as markdown after the variables have been replaced
|
||||||
|
updateCheck:
|
||||||
|
enabled: false
|
||||||
|
testRun: false
|
||||||
|
schedule: "Sun *-*-* 00:00:00 UTC"
|
||||||
|
senderAccount:
|
||||||
|
smtpId:
|
||||||
|
recipients: []
|
||||||
|
# - john.doe@gmail.com
|
||||||
|
subject: "Docspell {{ latestVersion }} is available"
|
||||||
|
body: |-
|
||||||
|
Hello,
|
||||||
|
|
||||||
|
You are currently running Docspell {{ currentVersion }}. Version *{{ latestVersion }}*
|
||||||
|
is now available, which was released on {{ releasedAt }}. Check the release page at:
|
||||||
|
|
||||||
|
<https://github.com/eikek/docspell/releases/latest>
|
||||||
|
|
||||||
|
Have a nice day!
|
||||||
|
|
||||||
|
Docspell Update Check
|
||||||
|
## @param docspell.joex.convert.htmlConverter Which HTML->PDF converter command to use. (wkhtmlpdf, weasyprint)
|
||||||
|
convert:
|
||||||
|
htmlConverter: wkhtmlpdf
|
||||||
|
## @param docspell.joex.fullTextSearch.migration.indexAllChunk Chunk size to use when indexing data from the database. This many attachments are loaded into memory and pushed to the full-text index
|
||||||
|
fullTextSearch:
|
||||||
|
migration:
|
||||||
|
indexAllChunk: 10
|
||||||
|
|
||||||
|
## @section Ingress
|
||||||
|
#
|
||||||
|
## @param ingress.enabled Enable ingress
|
||||||
|
## @param ingress.className Ingress class name
|
||||||
|
## @param ingress.annotations Ingress annotations
|
||||||
|
## @param ingress.hosts[0].host Default Ingress host
|
||||||
|
## @param ingress.hosts[0].paths[0].path Default Ingress path
|
||||||
|
## @param ingress.hosts[0].paths[0].pathType Ingress path type
|
||||||
|
## @param ingress.tls Ingress tls settings
|
||||||
|
## @extra ingress.apiVersion specify APIVersion of ingress object. Mostly would only be used for argocd
|
||||||
|
ingress:
|
||||||
|
enabled: true
|
||||||
|
className:
|
||||||
|
annotations:
|
||||||
|
# Set a proper upload size, so that large documents can be uploaded as well.
|
||||||
|
nginx.ingress.kubernetes.io/proxy-body-size: 10G
|
||||||
|
# This is needed for the websocket connections to work.
|
||||||
|
nginx.ingress.kubernetes.io/configuration-snippet: |
|
||||||
|
proxy_set_header Upgrade "websocket";
|
||||||
|
proxy_set_header Connection "Upgrade";
|
||||||
|
# kubernetes.io/ingress.class: nginx
|
||||||
|
# kubernetes.io/tls-acme: "true"
|
||||||
|
hosts:
|
||||||
|
- host: docspell.example.com
|
||||||
|
paths:
|
||||||
|
- path: /
|
||||||
|
pathType: Prefix
|
||||||
|
tls: []
|
||||||
|
# - secretName: chart-example-tls
|
||||||
|
# hosts:
|
||||||
|
# - docspell.example.com
|
||||||
|
|
||||||
|
## @section ServiceAccount
|
||||||
|
#
|
||||||
|
## @param serviceAccount.create Enable the creation of a ServiceAccount for docspell
|
||||||
|
## @param serviceAccount.name Name of the created ServiceAccount, defaults to release name.
|
||||||
|
## @param serviceAccount.automountServiceAccountToken Enable/disable auto mounting of the service account token
|
||||||
|
## @param serviceAccount.imagePullSecrets Image pull secrets, available to the ServiceAccount
|
||||||
|
## @param serviceAccount.annotations Custom annotations for the ServiceAccount
|
||||||
|
## @param serviceAccount.labels Custom labels for the ServiceAccount
|
||||||
|
serviceAccount:
|
||||||
|
create: false
|
||||||
|
name: ""
|
||||||
|
automountServiceAccountToken: false
|
||||||
|
imagePullSecrets: []
|
||||||
|
# - name: private-registry-access
|
||||||
|
annotations: {}
|
||||||
|
labels: {}
|
||||||
|
|
||||||
|
## @section Restserver
|
||||||
|
#
|
||||||
|
## @param restserver.replicaCount Number of replicas for the restserver deployment
|
||||||
|
## @param restserver.podAnnotations Annotations for the restserver pod
|
||||||
|
restserver:
|
||||||
|
replicaCount: 1
|
||||||
|
podAnnotations: {}
|
||||||
|
## @param restserver.image.registry Image registry, e.g. gcr.io,docker.io
|
||||||
|
## @param restserver.image.repository Image to start for this pod
|
||||||
|
## @param restserver.image.tag Visit [Image tag](https://hub.docker.com/r/docspell/restserver/tags?page=1&ordering=last_updated). Defaults to `appVersion` within Chart.yaml.
|
||||||
|
## @param restserver.image.pullPolicy Image pull policy
|
||||||
|
image:
|
||||||
|
registry: ""
|
||||||
|
repository: docspell/restserver
|
||||||
|
tag: ""
|
||||||
|
pullPolicy: IfNotPresent
|
||||||
|
|
||||||
|
## @param restserver.service.type Kubernetes service type for restserver traffic
|
||||||
|
## @param restserver.service.port Port number for restserver traffic
|
||||||
|
## @param restserver.service.annotations Restserver service annotations
|
||||||
|
## @param restserver.service.labels Restserver service additional labels
|
||||||
|
service:
|
||||||
|
type: ClusterIP
|
||||||
|
port: 7880
|
||||||
|
annotations: {}
|
||||||
|
labels: {}
|
||||||
|
|
||||||
|
## @param restserver.deployment.labels Labels for the restserver deployment
|
||||||
|
## @param restserver.deployment.annotations Annotations for the restserver deployment to be created
|
||||||
|
## @param restserver.deployment.terminationGracePeriodSeconds How long to wait before forcefully killing the restserver pod
|
||||||
|
## @param restserver.deployment.env Additional environment variables to pass to the restserver container
|
||||||
|
deployment:
|
||||||
|
labels: {}
|
||||||
|
annotations: {}
|
||||||
|
terminationGracePeriodSeconds: 60
|
||||||
|
env: []
|
||||||
|
|
||||||
|
## @param restserver.strategy.type Strategy type
|
||||||
|
## @param restserver.strategy.rollingUpdate.maxSurge maxSurge
|
||||||
|
## @param restserver.strategy.rollingUpdate.maxUnavailable maxUnavailable
|
||||||
|
strategy:
|
||||||
|
type: "RollingUpdate"
|
||||||
|
rollingUpdate:
|
||||||
|
maxSurge: "100%"
|
||||||
|
maxUnavailable: 0
|
||||||
|
## @param restserver.resources.limits.cpu CPU limit for the restserver pod
|
||||||
|
## @param restserver.resources.limits.memory Memory limit for the restserver pod
|
||||||
|
## @param restserver.resources.requests.cpu Requested cpu for the restserver pod
|
||||||
|
## @param restserver.resources.requests.memory Requested memory for the restserver pod
|
||||||
|
resources:
|
||||||
|
limits:
|
||||||
|
cpu: 1
|
||||||
|
memory: 1Gi
|
||||||
|
requests:
|
||||||
|
cpu: 0.5
|
||||||
|
memory: 512Mi
|
||||||
|
|
||||||
|
## @section Joex
|
||||||
|
#
|
||||||
|
## @param joex.replicaCount Number of replicas for the joex deployment
|
||||||
|
## @param joex.podAnnotations Annotations for the joex pod
|
||||||
|
## @param joex.additionalArgs Additional arguments that should be passed to the joex pod
|
||||||
|
joex:
|
||||||
|
replicaCount: 1
|
||||||
|
podAnnotations: {}
|
||||||
|
additionalArgs:
|
||||||
|
- -J-Xmx3G
|
||||||
|
## @param joex.image.registry Image registry, e.g. gcr.io,docker.io
|
||||||
|
## @param joex.image.repository Image to start for this pod
|
||||||
|
## @param joex.image.tag Visit [Image tag](https://hub.docker.com/r/docspell/joex/tags?page=1&ordering=last_updated). Defaults to `appVersion` within Chart.yaml.
|
||||||
|
## @param joex.image.pullPolicy Image pull policy
|
||||||
|
image:
|
||||||
|
registry: ""
|
||||||
|
repository: docspell/joex
|
||||||
|
tag: ""
|
||||||
|
pullPolicy: IfNotPresent
|
||||||
|
|
||||||
|
## @param joex.service.type Kubernetes service type for solr traffic
|
||||||
|
## @param joex.service.port Port number for solr traffic
|
||||||
|
## @param joex.service.annotations Solr service annotations
|
||||||
|
## @param joex.service.labels Solr service additional labels
|
||||||
|
service:
|
||||||
|
type: ClusterIP
|
||||||
|
port: 7878
|
||||||
|
annotations: {}
|
||||||
|
labels: {}
|
||||||
|
|
||||||
|
## @param joex.deployment.labels Labels for the restserver deployment
|
||||||
|
## @param joex.deployment.annotations Annotations for the restserver deployment to be created
|
||||||
|
## @param joex.deployment.terminationGracePeriodSeconds How long to wait until forcefully kill the restserver pod
|
||||||
|
## @param joex.deployment.env Additional environment variables to pass to the restserver container
|
||||||
|
deployment:
|
||||||
|
labels: {}
|
||||||
|
annotations: {}
|
||||||
|
terminationGracePeriodSeconds: 60
|
||||||
|
env: []
|
||||||
|
|
||||||
|
## @param joex.strategy.type Strategy type
|
||||||
|
## @param joex.strategy.rollingUpdate.maxSurge maxSurge
|
||||||
|
## @param joex.strategy.rollingUpdate.maxUnavailable maxUnavailable
|
||||||
|
strategy:
|
||||||
|
type: "RollingUpdate"
|
||||||
|
rollingUpdate:
|
||||||
|
maxSurge: "100%"
|
||||||
|
maxUnavailable: 0
|
||||||
|
## @param joex.resources.limits.cpu CPU limit for the joex pod
|
||||||
|
## @param joex.resources.limits.memory Memory limit for the joex pod. Make sure to change the `-J-Xmx` argument to reflect the max-memory setting
|
||||||
|
## @param joex.resources.requests.cpu Requested cpu for the joex pod
|
||||||
|
## @param joex.resources.requests.memory Requested memory for the joex pod
|
||||||
|
resources:
|
||||||
|
limits:
|
||||||
|
cpu: 1
|
||||||
|
memory: 3Gi
|
||||||
|
requests:
|
||||||
|
cpu: 0.5
|
||||||
|
memory: 1.5Gi
|
||||||
|
## @section solr
|
||||||
|
#
|
||||||
|
## @param solr.enabled Enable Apache Solr for full-text-search
|
||||||
|
## @param solr.podAnnotations Annotations for the solr pod
|
||||||
|
## @param solr.nodeSelector NodeSelector for the solr statefulset
|
||||||
|
## @param solr.affinity Affinity for the solr statefulset
|
||||||
|
|
||||||
|
## @param solr.livenessProbe
|
||||||
|
|
||||||
|
solr:
|
||||||
|
enabled: true
|
||||||
|
podAnnotations: {}
|
||||||
|
nodeSelector: {}
|
||||||
|
affinity: {}
|
||||||
|
## @param solr.image.registry Image registry, e.g. gcr.io,docker.io
|
||||||
|
## @param solr.image.repository Image to start for this pod
|
||||||
|
## @param solr.image.tag Visit [Image tag](https://hub.docker.com/_/solr/tags?page=1&ordering=last_updated). Default is `9`.
|
||||||
|
## @param solr.image.pullPolicy Image pull policy
|
||||||
|
image:
|
||||||
|
registry: ""
|
||||||
|
repository: solr
|
||||||
|
tag: "9"
|
||||||
|
pullPolicy: IfNotPresent
|
||||||
|
## @param solr.service.type Kubernetes service type for solr traffic
|
||||||
|
## @param solr.service.port Port number for solr traffic
|
||||||
|
## @param solr.service.annotations Solr service annotations
|
||||||
|
## @param solr.service.labels Solr service additional labels
|
||||||
|
service:
|
||||||
|
type: ClusterIP
|
||||||
|
port: 8983
|
||||||
|
annotations: {}
|
||||||
|
labels: {}
|
||||||
|
## @param solr.livenessProbe.enabled Enable liveness probe
|
||||||
|
## @param solr.livenessProbe.httpGet.port Port for the http get request
|
||||||
|
## @param solr.livenessProbe.httpGet.path URL path for the http get request
|
||||||
|
## @param solr.livenessProbe.initialDelaySeconds Initial delay before liveness probe is initiated
|
||||||
|
## @param solr.livenessProbe.periodSeconds Period for liveness probe
|
||||||
|
## @param solr.livenessProbe.timoutSeconds Timeout for liveness probe
|
||||||
|
livenessProbe:
|
||||||
|
enabled: true
|
||||||
|
httpGet:
|
||||||
|
port: 8983
|
||||||
|
path: /solr/admin/info/system
|
||||||
|
initialDelaySeconds: 60
|
||||||
|
periodSeconds: 10
|
||||||
|
timeoutSeconds: 5
|
||||||
|
## @param solr.readinessProbe.enabled Enable readiness probe
|
||||||
|
## @param solr.readinessProbe.httpGet.port Port for the http get request
|
||||||
|
## @param solr.readinessProbe.httpGet.path URL path for the http get request
|
||||||
|
## @param solr.readinessProbe.initialDelaySeconds Initial delay before readiness probe is initiated
|
||||||
|
## @param solr.readinessProbe.periodSeconds Period for readiness probe
|
||||||
|
## @param solr.readinessProbe.timoutSeconds Timeout for readiness probe
|
||||||
|
readinessProbe:
|
||||||
|
enabled: true
|
||||||
|
httpGet:
|
||||||
|
path: /solr/admin/info/system
|
||||||
|
port: 8983
|
||||||
|
initialDelaySeconds: 10
|
||||||
|
periodSeconds: 5
|
||||||
|
timeoutSeconds: 1
|
||||||
|
|
||||||
|
## @param solr.resources Kubernetes resouces for solr
|
||||||
|
resources:
|
||||||
|
{}
|
||||||
|
|
||||||
|
## @param solr.initContainers.resources.limits Kubernetes limits for solr init containers
|
||||||
|
## @param solr.initContainers.resources.requests.cpu cpu resource limits for solr init containers
|
||||||
|
## @param solr.initContainers.resources.requests.memory memory limits for solr init containers
|
||||||
|
initContainers:
|
||||||
|
resources:
|
||||||
|
limits: {}
|
||||||
|
requests:
|
||||||
|
cpu: 100m
|
||||||
|
memory: 128Mi
|
||||||
|
|
||||||
|
## @param solr.statefulSet.labels Labels for the solr statefulset
|
||||||
|
## @param solr.statefulSet.annotations Annotations for the solr statefulset to be created
|
||||||
|
## @param solr.statefulSet.terminationGracePeriodSeconds How long to wait until forcefully kill the solr pod
|
||||||
|
## @param solr.statefulSet.env Additional environment variables to pass to the solr container
|
||||||
|
statefulSet:
|
||||||
|
labels: {}
|
||||||
|
annotations: {}
|
||||||
|
terminationGracePeriodSeconds: 60
|
||||||
|
env: []
|
||||||
|
# - name: VARIABLE
|
||||||
|
# value: my-value
|
||||||
|
|
||||||
|
## @param solr.persistence.enabled Enable persistence storage for solr
|
||||||
|
## @param solr.persistence.claimName Use an existing claim to store solr index
|
||||||
|
## @param solr.persistence.size Size for persistence to store solr index
|
||||||
|
## @param solr.persistence.accessModes ACcessMode for persistence
|
||||||
|
## @param solr.persistence.storageClass Name of the storage class to use
|
||||||
|
## @param solr.persistence.volumeName Name of persistent volume in PVC
|
||||||
|
## @param solr.persistence.annotations.helm/sh/resource-policy Resource policy for the persistence volume claim
|
||||||
|
persistence:
|
||||||
|
enabled: true
|
||||||
|
claimName: solr-data
|
||||||
|
size: 5Gi
|
||||||
|
accessModes: ["ReadWriteOnce"]
|
||||||
|
storageClass:
|
||||||
|
volumeName: ""
|
||||||
|
annotations:
|
||||||
|
helm.sh/resource-policy: keep
|
||||||
|
|
||||||
|
## @section PostgreSQL
|
||||||
|
#
|
||||||
|
## @param postgresql.enabled Enable PostgreSQL
|
||||||
|
## @param postgresql.global.postgresql.auth.password Password for the `dbname` user (overrides `auth.password`)
|
||||||
|
## @param postgresql.global.postgresql.auth.database Name for a custom database to create (overrides `auth.database`)
|
||||||
|
## @param postgresql.global.postgresql.auth.username Name for a custom user to create (overrides `auth.username`)
|
||||||
|
## @param postgresql.global.postgresql.auth.existingSecret Name of an existing Kubernetes secret that contains the postgresql credentials. `auth.password` will be ignored and picked up from this secret
|
||||||
|
## @param postgresql.global.postgresql.auth.secretKeys.adminPasswordKey Name of key in existing secret to use for PostgreSQL credentials.
|
||||||
|
## @param postgresql.global.postgresql.auth.secretKeys.userPasswordKey Name of key in existing secret to use for PostgreSQL credentials.
|
||||||
|
## @param postgresql.global.postgresql.service.ports.postgresql PostgreSQL service port (overrides `service.ports.postgresql`)
|
||||||
|
## @param postgresql.primary.persistence.size PVC Storage Request for PostgreSQL volume
|
||||||
|
postgresql:
|
||||||
|
enabled: true
|
||||||
|
global:
|
||||||
|
postgresql:
|
||||||
|
auth:
|
||||||
|
database: dbname
|
||||||
|
username: dbuser
|
||||||
|
password: dbpass
|
||||||
|
# existingSecret: postgres-secret
|
||||||
|
# secretKeys:
|
||||||
|
# adminPasswordKey: postgres-password
|
||||||
|
# userPasswordKey: password
|
||||||
|
service:
|
||||||
|
postgresql: 5432
|
||||||
|
primary:
|
||||||
|
persistence:
|
||||||
|
size: 10Gi
|
||||||
|
annotations:
|
||||||
|
helm.sh/resource-policy: keep
|
@@ -38,9 +38,9 @@ final case class AddonArchive(url: LenientUri, name: String, version: String) {
       Files[F].createDirectories(target) *>
         reader(url)
           .through(Zip[F](logger.some).unzip(glob = glob, targetDir = target.some))
+          .evalTap(_ => Directory.unwrapSingle[F](logger, target))
           .compile
           .drain
-          .flatTap(_ => Directory.unwrapSingle[F](logger, target))
          .as(target)
     }
   }
@@ -110,7 +110,7 @@ private[addons] object RunnerUtil {
   ): F[AddonResult] =
     for {
       stdout <-
-        if (ctx.meta.parseResult) CollectOut.buffer[F]
+        if (ctx.meta.options.exists(_.collectOutput)) CollectOut.buffer[F]
         else CollectOut.none[F].pure[F]
       cmdResult <- SysExec(cmd, logger, ctx.baseDir.some)
         .flatMap(
@@ -135,7 +135,7 @@ private[addons] object RunnerUtil {
       out <- stdout.get
       _ <- logger.debug(s"Addon stdout: $out")
       result = Option
-        .when(ctx.meta.parseResult && out.nonEmpty)(
+        .when(ctx.meta.options.exists(_.collectOutput) && out.nonEmpty)(
           JsonParser
             .decode[AddonOutput](out)
             .fold(AddonResult.decodingError, AddonResult.success)
Binary file not shown.
@@ -9,7 +9,7 @@ package docspell.addons
 import cats.effect._
 import cats.syntax.option._

-import docspell.common._
+import docspell.common.UrlReader
 import docspell.logging.TestLoggingConfig

 import munit._
@@ -42,20 +42,10 @@ class AddonArchiveTest extends CatsEffectSuite with TestLoggingConfig with Fixtu
     } yield ()
   }

-  tempDir.test("read archive from zip with yaml only") { dir =>
-    for {
-      aa <- AddonArchive.read[IO](singleFileAddonUrl, UrlReader.defaultReader[IO], None)
-      _ = assertEquals(aa.version, "0.7.0")
-      path <- aa.extractTo(UrlReader.defaultReader[IO], dir)
-      read <- AddonArchive.read[IO](aa.url, UrlReader.defaultReader[IO], path.some)
-      _ = assertEquals(aa, read)
-    } yield ()
-  }
-
   tempDir.test("Read generated addon from path") { dir =>
     AddonGenerator.successAddon("mini-addon").use { addon =>
       for {
-        archive <- IO(AddonArchive(addon.url, "test-addon", "0.1.0"))
+        archive <- IO(AddonArchive(addon.url, "", ""))
         path <- archive.extractTo[IO](UrlReader.defaultReader[IO], dir)

         read <- AddonArchive.read[IO](addon.url, UrlReader.defaultReader[IO], path.some)
@@ -142,7 +142,7 @@ class AddonExecutorTest extends CatsEffectSuite with Fixtures with TestLoggingCo
         AddonExecutionResult.executionResultMonoid
           .combine(
             AddonExecutionResult.empty,
-            AddonExecutionResult(Nil, pure = true)
+            AddonExecutionResult(Nil, true)
           )
           .pure
       )
@@ -27,9 +27,9 @@ object AddonGenerator {
   ): Resource[IO, AddonArchive] =
     output match {
       case None =>
-        generate(name, version, collectOutput = false)("exit 0")
+        generate(name, version, false)("exit 0")
       case Some(out) =>
-        generate(name, version, collectOutput = true)(
+        generate(name, version, true)(
          s"""
            |cat <<-EOF
            |${out.asJson.noSpaces}
@@ -77,9 +77,8 @@ object AddonGenerator {
       meta = AddonMeta.Meta(name, version, None),
       triggers = Set(AddonTriggerType.ExistingItem: AddonTriggerType).some,
       args = None,
-      runner = AddonMeta
-        .Runner(None, None, AddonMeta.TrivialRunner(enable = true, "addon.sh").some)
-        .some,
+      runner =
+        AddonMeta.Runner(None, None, AddonMeta.TrivialRunner(true, "addon.sh").some).some,
       options =
         AddonMeta.Options(networking = !collectOutput, collectOutput = collectOutput).some
     )
@@ -35,13 +35,4 @@ class AddonMetaTest extends CatsEffectSuite with TestLoggingConfig with Fixtures
       _ = assertEquals(meta, dummyAddonMeta)
     } yield ()
   }
-
-  test("parse yaml with defaults") {
-    val yamlStr = """meta:
-                    |  name: "test"
-                    |  version: "0.1.0"
-                    |""".stripMargin
-    val meta = AddonMeta.fromYamlString(yamlStr).fold(throw _, identity)
-    assert(meta.parseResult)
-  }
 }
@@ -31,9 +31,6 @@ trait Fixtures extends TestLoggingConfig { self: CatsEffectSuite =>
   val miniAddonUrl =
     LenientUri.fromJava(getClass.getResource("/minimal-addon.zip"))

-  val singleFileAddonUrl =
-    LenientUri.fromJava(getClass.getResource("/docspell-addon-single-file.zip"))
-
   val dummyAddonMeta =
     AddonMeta(
       meta =
@@ -43,13 +40,13 @@ trait Fixtures extends TestLoggingConfig { self: CatsEffectSuite =>
       ),
       None,
       runner = Runner(
-        nix = NixRunner(enable = true).some,
+        nix = NixRunner(true).some,
         docker = DockerRunner(
           enable = true,
           image = None,
           build = "Dockerfile".some
         ).some,
-        trivial = TrivialRunner(enable = true, "src/addon.sh").some
+        trivial = TrivialRunner(true, "src/addon.sh").some
       ).some,
       options = Options(networking = true, collectOutput = true).some
     )
@@ -58,7 +55,7 @@ trait Fixtures extends TestLoggingConfig { self: CatsEffectSuite =>
     Path(s"/tmp/target/test-temp")

   val tempDir =
-    ResourceFunFixture[Path](
+    ResourceFixture[Path](
       Resource.eval(Files[IO].createDirectories(baseTempDir)) *>
         Files[IO]
           .tempDirectory(baseTempDir.some, "run-", PosixPermissions.fromOctal("777"))
@@ -68,7 +65,7 @@ trait Fixtures extends TestLoggingConfig { self: CatsEffectSuite =>
      runner: RunnerType,
      runners: RunnerType*
  ): AddonExecutorConfig = {
-    val nspawn = NSpawn(enabled = false, "sudo", "systemd-nspawn", Duration.millis(100))
+    val nspawn = NSpawn(false, "sudo", "systemd-nspawn", Duration.millis(100))
    AddonExecutorConfig(
      runner = runner :: runners.toList,
      runTimeout = Duration.minutes(2),
@@ -125,7 +125,6 @@ object DateFind {
       case Language.Dutch => dmy.or(ymd).or(mdy)
       case Language.Latvian => dmy.or(lavLong).or(ymd)
       case Language.Japanese => ymd
-      case Language.JpnVert => ymd
       case Language.Hebrew => dmy
       case Language.Lithuanian => ymd
       case Language.Polish => dmy
@@ -54,8 +54,6 @@ object MonthName {
        latvian
      case Language.Japanese =>
        japanese
-      case Language.JpnVert =>
-        japanese
      case Language.Hebrew =>
        hebrew
      case Language.Lithuanian =>
@@ -22,7 +22,7 @@ import munit._

 class StanfordNerAnnotatorSuite extends FunSuite with TestLoggingConfig {
   lazy val germanClassifier =
-    new StanfordCoreNLP(Properties.nerGerman(None, highRecall = false))
+    new StanfordCoreNLP(Properties.nerGerman(None, false))
   lazy val englishClassifier =
     new StanfordCoreNLP(Properties.nerEnglish(None))

@@ -90,6 +90,6 @@ object Config {
   }
   object Addons {
     val disabled: Addons =
-      Addons(enabled = false, allowImpure = false, UrlMatcher.False, UrlMatcher.True)
+      Addons(false, false, UrlMatcher.False, UrlMatcher.True)
   }
 }
@@ -127,7 +127,7 @@ object Login {
        _ <- logF.trace(s"Account lookup: $data")
        res <- data match {
          case Some(d) if checkNoPassword(d, Set(AccountSource.OpenId)) =>
-            doLogin(config, d.account, rememberMe = false)
+            doLogin(config, d.account, false)
          case Some(d) if checkNoPassword(d, Set(AccountSource.Local)) =>
            config.onAccountSourceConflict match {
              case OnAccountSourceConflict.Fail =>
@@ -145,7 +145,7 @@ object Login {
                    AccountSource.OpenId
                  )
                )
-                res <- doLogin(config, d.account, rememberMe = false)
+                res <- doLogin(config, d.account, false)
              } yield res
            }
          case _ =>
@@ -212,12 +212,7 @@ object Login {
      val okResult: F[Result] =
        for {
          _ <- store.transact(RUser.updateLogin(sf.token.account))
-          newToken <- AuthToken.user(
-            sf.token.account,
-            requireSecondFactor = false,
-            config.serverSecret,
-            None
-          )
+          newToken <- AuthToken.user(sf.token.account, false, config.serverSecret, None)
          rem <- OptionT
            .whenF(sf.rememberMe && config.rememberMe.enabled)(
              insertRememberToken(store, sf.token.account, config)
@@ -244,9 +239,7 @@ object Login {
      (for {
        _ <- validateToken
        key <- EitherT.fromOptionF(
-          store.transact(
-            RTotp.findEnabledByUserId(sf.token.account.userId, enabled = true)
-          ),
+          store.transact(RTotp.findEnabledByUserId(sf.token.account.userId, true)),
          Result.invalidAuth
        )
        now <- EitherT.right[Result](Timestamp.current[F])
@@ -262,12 +255,7 @@ object Login {
      def okResult(acc: AccountInfo) =
        for {
          _ <- store.transact(RUser.updateLogin(acc))
-          token <- AuthToken.user(
-            acc,
-            requireSecondFactor = false,
-            config.serverSecret,
-            None
-          )
+          token <- AuthToken.user(acc, false, config.serverSecret, None)
        } yield Result.ok(token, None)

      def rememberedLogin(rid: Ident) =
@@ -93,7 +93,7 @@ object AddonOps {
          AddonResult.executionFailed(
            new Exception(s"Addon run config ${id.id} not found.")
          ) :: Nil,
-          pure = false
+          false
        ) :: Nil,
        Nil
      )
@@ -72,7 +72,7 @@ private[joex] class AddonPrepare[F[_]: Sync](store: Store[F]) extends LoggerExte

        token <- AuthToken.user(
          account,
-          requireSecondFactor = false,
+          false,
          secret.getOrElse(ByteVector.empty),
          tokenValidity.some
        )
@@ -194,14 +194,7 @@ object OCollective {
      id <- Ident.randomId[F]
      settings = sett.emptyTrash.getOrElse(EmptyTrash.default)
      args = EmptyTrashArgs(cid, settings.minAge)
-      ut = UserTask(
-        id,
-        EmptyTrashArgs.taskName,
-        enabled = true,
-        settings.schedule,
-        None,
-        args
-      )
+      ut = UserTask(id, EmptyTrashArgs.taskName, true, settings.schedule, None, args)
      _ <- uts.updateOneTask(UserTaskScope.collective(cid), args.makeSubject.some, ut)
      _ <- joex.notifyAllNodes
    } yield ()
@@ -227,7 +220,7 @@ object OCollective {
      ut = UserTask(
        id,
        LearnClassifierArgs.taskName,
-        enabled = true,
+        true,
        CalEvent(WeekdayComponent.All, DateEvent.All, TimeEvent.All),
        None,
        args
@@ -246,7 +239,7 @@ object OCollective {
      ut = UserTask(
        id,
        EmptyTrashArgs.taskName,
-        enabled = true,
+        true,
        CalEvent(WeekdayComponent.All, DateEvent.All, TimeEvent.All),
        None,
        args
@@ -114,14 +114,14 @@ object ONotification {
        )
        _ <- notMod.send(logbuf._2.andThen(log), ev, ch)
        logs <- logbuf._1.get
-        res = SendTestResult(success = true, logs)
+        res = SendTestResult(true, logs)
      } yield res).attempt
        .map {
          case Right(res) => res
          case Left(ex) =>
            val ev =
              LogEvent.of(Level.Error, "Failed sending sample event").addError(ex)
-            SendTestResult(success = false, Vector(ev))
+            SendTestResult(false, Vector(ev))
        }

    def listChannels(userId: Ident): F[Vector[Channel]] =
@@ -120,9 +120,7 @@ object OTotp {
    def confirmInit(accountId: AccountInfo, otp: OnetimePassword): F[ConfirmResult] =
      for {
        _ <- log.info(s"Confirm TOTP setup for account ${accountId.asString}")
-        key <- store.transact(
-          RTotp.findEnabledByUserId(accountId.userId, enabled = false)
-        )
+        key <- store.transact(RTotp.findEnabledByUserId(accountId.userId, false))
        now <- Timestamp.current[F]
        res <- key match {
          case None =>
@@ -131,7 +129,7 @@ object OTotp {
            val check = totp.checkPassword(r.secret, otp, now.value)
            if (check)
              store
-                .transact(RTotp.setEnabled(accountId.userId, enabled = true))
+                .transact(RTotp.setEnabled(accountId.userId, true))
                .map(_ => ConfirmResult.Success)
            else ConfirmResult.Failed.pure[F]
        }
@@ -142,7 +140,7 @@ object OTotp {
        case Some(pw) =>
          for {
            _ <- log.info(s"Validating TOTP, because it is requested to disable it.")
-            key <- store.transact(RTotp.findEnabledByLogin(accountId, enabled = true))
+            key <- store.transact(RTotp.findEnabledByLogin(accountId, true))
            now <- Timestamp.current[F]
            res <- key match {
              case None =>
@@ -151,7 +149,7 @@ object OTotp {
                val check = totp.checkPassword(r.secret, pw, now.value)
                if (check)
                  UpdateResult.fromUpdate(
-                    store.transact(RTotp.setEnabled(r.userId, enabled = false))
+                    store.transact(RTotp.setEnabled(r.userId, false))
                  )
                else
                  log.info(s"TOTP code was invalid. Not disabling it.") *> UpdateResult
@@ -162,15 +160,15 @@ object OTotp {
        case None =>
          UpdateResult.fromUpdate {
            (for {
-              key <- OptionT(RTotp.findEnabledByLogin(accountId, enabled = true))
-              n <- OptionT.liftF(RTotp.setEnabled(key.userId, enabled = false))
+              key <- OptionT(RTotp.findEnabledByLogin(accountId, true))
+              n <- OptionT.liftF(RTotp.setEnabled(key.userId, false))
            } yield n).mapK(store.transform).getOrElse(0)
          }
      }

    def state(acc: AccountInfo): F[OtpState] =
      for {
-        record <- store.transact(RTotp.findEnabledByUserId(acc.userId, enabled = true))
+        record <- store.transact(RTotp.findEnabledByUserId(acc.userId, true))
        result = record match {
          case Some(r) =>
            OtpState.Enabled(r.created)
@@ -159,7 +159,7 @@ object OUpload {
          data.meta.skipDuplicates,
          data.meta.fileFilter.some,
          data.meta.tags.some,
-          reprocess = false,
+          false,
          data.meta.attachmentsOnly,
          data.meta.customData
        )
@@ -32,12 +32,9 @@ class AuthTokenTest extends CatsEffectSuite {
  val otherSecret = ByteVector.fromValidHex("16bad")

  test("validate") {
-    val token1 =
-      AuthToken.user[IO](user, requireSecondFactor = false, secret, None).unsafeRunSync()
+    val token1 = AuthToken.user[IO](user, false, secret, None).unsafeRunSync()
    val token2 =
-      AuthToken
-        .user[IO](user, requireSecondFactor = false, secret, Duration.seconds(10).some)
-        .unsafeRunSync()
+      AuthToken.user[IO](user, false, secret, Duration.seconds(10).some).unsafeRunSync()
    assert(token1.validate(secret, Duration.seconds(5)))
    assert(!token1.validate(otherSecret, Duration.seconds(5)))
    assert(!token1.copy(account = john).validate(secret, Duration.seconds(5)))
@@ -49,12 +46,9 @@ class AuthTokenTest extends CatsEffectSuite {
  }

  test("signature") {
-    val token1 =
-      AuthToken.user[IO](user, requireSecondFactor = false, secret, None).unsafeRunSync()
+    val token1 = AuthToken.user[IO](user, false, secret, None).unsafeRunSync()
    val token2 =
-      AuthToken
-        .user[IO](user, requireSecondFactor = false, secret, Duration.seconds(10).some)
-        .unsafeRunSync()
+      AuthToken.user[IO](user, false, secret, Duration.seconds(10).some).unsafeRunSync()

    assert(token1.sigValid(secret))
    assert(token1.sigInvalid(otherSecret))
@@ -123,11 +123,6 @@ object Language {
    val iso3 = "jpn"
  }

-  /*It's not an ISO value, but this needs to be unique and tesseract will need jpn_vert for it's scan from the config of /etc/docspell-joex/docspell-joex.conf.*/
-  case object JpnVert extends Language {
-    val iso2 = "ja_vert"
-    val iso3 = "jpn_vert"
-  }
  case object Hebrew extends Language {
    val iso2 = "he"
    val iso3 = "heb"
@@ -177,7 +172,6 @@ object Language {
    Romanian,
    Latvian,
    Japanese,
-    JpnVert,
    Hebrew,
    Lithuanian,
    Polish,
@@ -78,11 +78,7 @@ case class LenientUri(
      .covary[F]
      .rethrow
      .flatMap(url =>
-        fs2.io.readInputStream(
-          Sync[F].delay(url.openStream()),
-          chunkSize,
-          closeAfterUse = true
-        )
+        fs2.io.readInputStream(Sync[F].delay(url.openStream()), chunkSize, true)
      )

  def readText[F[_]: Sync](chunkSize: Int): F[String] =
@@ -125,7 +121,7 @@ object LenientUri {
    val isRoot = true
    val isEmpty = false
    def /(seg: String): Path =
-      NonEmptyPath(NonEmptyList.of(seg), trailingSlash = false)
+      NonEmptyPath(NonEmptyList.of(seg), false)
    def asString = "/"
  }
  case object EmptyPath extends Path {
@@ -133,7 +129,7 @@ object LenientUri {
    val isRoot = false
    val isEmpty = true
    def /(seg: String): Path =
-      NonEmptyPath(NonEmptyList.of(seg), trailingSlash = false)
+      NonEmptyPath(NonEmptyList.of(seg), false)
    def asString = ""
  }
  case class NonEmptyPath(segs: NonEmptyList[String], trailingSlash: Boolean)
@@ -194,7 +194,7 @@ object MimeType {
        val csValueStart = in.substring(n + "charset=".length).trim
        val csName = csValueStart.indexOf(';') match {
          case -1 => unquote(csValueStart).trim
-          case n2 => unquote(csValueStart.substring(0, n2)).trim
+          case n => unquote(csValueStart.substring(0, n)).trim
        }
        if (Charset.isSupported(csName)) Right((Some(Charset.forName(csName)), ""))
        else Right((None, ""))
@@ -0,0 +1,212 @@
+/*
+ * Copyright 2020 Eike K. & Contributors
+ *
+ * SPDX-License-Identifier: AGPL-3.0-or-later
+ */
+
+package docspell.common
+
+import java.io.InputStream
+import java.lang.ProcessBuilder.Redirect
+import java.util.concurrent.TimeUnit
+
+import scala.jdk.CollectionConverters._
+
+import cats.effect._
+import cats.implicits._
+import fs2.io.file.Path
+import fs2.{Stream, io, text}
+
+import docspell.common.{exec => newExec}
+import docspell.logging.Logger
+
+// better use `SysCmd` and `SysExec`
+object SystemCommand {
+
+  final case class Config(
+      program: String,
+      args: Seq[String],
+      timeout: Duration,
+      env: Map[String, String] = Map.empty
+  ) {
+
+    def toSysCmd = newExec
+      .SysCmd(program, newExec.Args(args))
+      .withTimeout(timeout)
+      .addEnv(newExec.Env(env))
+
+    def mapArgs(f: String => String): Config =
+      Config(program, args.map(f), timeout)
+
+    def replace(repl: Map[String, String]): Config =
+      mapArgs(s =>
+        repl.foldLeft(s) { case (res, (k, v)) =>
+          res.replace(k, v)
+        }
+      )
+
+    def withEnv(key: String, value: String): Config =
+      copy(env = env.updated(key, value))
+
+    def addEnv(moreEnv: Map[String, String]): Config =
+      copy(env = env ++ moreEnv)
+
+    def appendArgs(extraArgs: Args): Config =
+      copy(args = args ++ extraArgs.args)
+
+    def appendArgs(extraArgs: Seq[String]): Config =
+      copy(args = args ++ extraArgs)
+
+    def toCmd: List[String] =
+      program :: args.toList
+
+    lazy val cmdString: String =
+      toCmd.mkString(" ")
+  }
+
+  final case class Args(args: Vector[String]) extends Iterable[String] {
+    override def iterator = args.iterator
+
+    def prepend(a: String): Args = Args(a +: args)
+
+    def prependWhen(flag: Boolean)(a: String): Args =
+      prependOption(Option.when(flag)(a))
+
+    def prependOption(value: Option[String]): Args =
+      value.map(prepend).getOrElse(this)
+
+    def append(a: String, as: String*): Args =
+      Args(args ++ (a +: as.toVector))
+
+    def appendOption(value: Option[String]): Args =
+      value.map(append(_)).getOrElse(this)
+
+    def appendOptionVal(first: String, second: Option[String]): Args =
+      second.map(b => append(first, b)).getOrElse(this)
+
+    def appendWhen(flag: Boolean)(a: String, as: String*): Args =
+      if (flag) append(a, as: _*) else this
+
+    def appendWhenNot(flag: Boolean)(a: String, as: String*): Args =
+      if (!flag) append(a, as: _*) else this
+
+    def append(p: Path): Args =
+      append(p.toString)
+
+    def append(as: Iterable[String]): Args =
+      Args(args ++ as.toVector)
+  }
+  object Args {
+    val empty: Args = Args()
+
+    def apply(as: String*): Args =
+      Args(as.toVector)
+  }
+
+  final case class Result(rc: Int, stdout: String, stderr: String)
+
+  def exec[F[_]: Sync](
+      cmd: Config,
+      logger: Logger[F],
+      wd: Option[Path] = None,
+      stdin: Stream[F, Byte] = Stream.empty
+  ): Stream[F, Result] =
+    startProcess(cmd, wd, logger, stdin) { proc =>
+      Stream.eval {
+        for {
+          _ <- writeToProcess(stdin, proc)
+          term <- Sync[F].blocking(proc.waitFor(cmd.timeout.seconds, TimeUnit.SECONDS))
+          _ <-
+            if (term)
+              logger.debug(s"Command `${cmd.cmdString}` finished: ${proc.exitValue}")
+            else
+              logger.warn(
+                s"Command `${cmd.cmdString}` did not finish in ${cmd.timeout.formatExact}!"
+              )
+          _ <- if (!term) timeoutError(proc, cmd) else Sync[F].pure(())
+          out <-
+            if (term) inputStreamToString(proc.getInputStream)
+            else Sync[F].pure("")
+          err <-
+            if (term) inputStreamToString(proc.getErrorStream)
+            else Sync[F].pure("")
+        } yield Result(proc.exitValue, out, err)
+      }
+    }
+
+  def execSuccess[F[_]: Sync](
+      cmd: Config,
+      logger: Logger[F],
+      wd: Option[Path] = None,
+      stdin: Stream[F, Byte] = Stream.empty
+  ): Stream[F, Result] =
+    exec(cmd, logger, wd, stdin).flatMap { r =>
+      if (r.rc != 0)
+        Stream.raiseError[F](
+          new Exception(
+            s"Command `${cmd.cmdString}` returned non-zero exit code ${r.rc}. Stderr: ${r.stderr}"
+          )
+        )
+      else Stream.emit(r)
+    }
+
+  private def startProcess[F[_]: Sync, A](
+      cmd: Config,
+      wd: Option[Path],
+      logger: Logger[F],
+      stdin: Stream[F, Byte]
+  )(
+      f: Process => Stream[F, A]
+  ): Stream[F, A] = {
+    val log = logger.debug(s"Running external command: ${cmd.cmdString}")
+    val hasStdin = stdin.take(1).compile.last.map(_.isDefined)
+    val proc = log *> hasStdin.flatMap(flag =>
+      Sync[F].blocking {
+        val pb = new ProcessBuilder(cmd.toCmd.asJava)
+          .redirectInput(if (flag) Redirect.PIPE else Redirect.INHERIT)
+          .redirectError(Redirect.PIPE)
+          .redirectOutput(Redirect.PIPE)
+
+        val pbEnv = pb.environment()
+        cmd.env.foreach { case (key, value) =>
+          pbEnv.put(key, value)
+        }
+        wd.map(_.toNioPath.toFile).foreach(pb.directory)
+        pb.start()
+      }
+    )
+    Stream
+      .bracket(proc)(p =>
+        logger.debug(s"Closing process: `${cmd.cmdString}`").map(_ => p.destroy())
+      )
+      .flatMap(f)
+  }
+
+  private def inputStreamToString[F[_]: Sync](in: InputStream): F[String] =
+    io.readInputStream(Sync[F].pure(in), 16 * 1024, closeAfterUse = false)
+      .through(text.utf8.decode)
+      .chunks
+      .map(_.toVector.mkString)
+      .fold1(_ + _)
+      .compile
+      .last
+      .map(_.getOrElse(""))
+
+  private def writeToProcess[F[_]: Sync](
+      data: Stream[F, Byte],
+      proc: Process
+  ): F[Unit] =
+    data
+      .through(io.writeOutputStream(Sync[F].blocking(proc.getOutputStream)))
+      .compile
+      .drain
+
+  private def timeoutError[F[_]: Sync](proc: Process, cmd: Config): F[Unit] =
+    Sync[F].blocking(proc.destroyForcibly()).attempt *> {
+      Sync[F].raiseError(
+        new Exception(
+          s"Command `${cmd.cmdString}` timed out (${cmd.timeout.formatExact})"
+        )
+      )
+    }
+}
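
As an aside, the `SystemCommand` helper shown in the hunk above is small enough to use directly. Below is a minimal usage sketch, not code from the repository: the program, arguments and timeout are made up, only the `Config`, `exec`/`execSuccess` and `Result` shapes come from the file in this hunk.

import cats.effect.IO
import docspell.common.{Duration, SystemCommand}

object SystemCommandExample {
  private val logger = docspell.logging.getLogger[IO]

  // Illustrative only: run `ls -la` with a 10 second timeout; execSuccess fails
  // the stream when the process exits with a non-zero return code.
  val result: IO[SystemCommand.Result] =
    SystemCommand
      .execSuccess[IO](SystemCommand.Config("ls", Seq("-la"), Duration.seconds(10)), logger)
      .compile
      .lastOrError
}

Note that `exec` by itself does not fail on a non-zero exit code; it only reports it in `Result.rc`, which is the check `execSuccess` adds on top.
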
@@ -62,7 +62,7 @@ object UrlMatcher {
    // strip path to only match prefixes
    val mPath: LenientUri.Path =
      NonEmptyList.fromList(url.path.segments.take(pathSegmentCount)) match {
-        case Some(nel) => LenientUri.NonEmptyPath(nel, trailingSlash = false)
+        case Some(nel) => LenientUri.NonEmptyPath(nel, false)
        case None => LenientUri.RootPath
      }

@@ -17,9 +17,6 @@ case class Env(values: Map[String, String]) {
  def addAll(e: Env): Env =
    Env(values ++ e.values)

-  def modifyValue(f: String => String): Env =
-    Env(values.view.mapValues(f).toMap)
-
  def ++(e: Env) = addAll(e)

  def foreach(f: (String, String) => Unit): Unit =
@@ -1,89 +0,0 @@
-/*
- * Copyright 2020 Eike K. & Contributors
- *
- * SPDX-License-Identifier: AGPL-3.0-or-later
- */
-
-package docspell.common.exec
-
-import docspell.common.Duration
-import docspell.common.Ident
-import docspell.common.exec.Env
-import docspell.common.exec.ExternalCommand.ArgMapping
-import docspell.common.exec.SysCmd
-
-final case class ExternalCommand(
-    program: String,
-    args: Seq[String],
-    timeout: Duration,
-    env: Map[String, String] = Map.empty,
-    argMappings: Map[Ident, ArgMapping] = Map.empty
-) {
-  def withVars(vars: Map[String, String]): ExternalCommand.WithVars =
-    ExternalCommand.WithVars(this, vars)
-
-  import ExternalCommand.pattern
-
-  def resolve(vars: Map[String, String]): SysCmd = {
-    val replace = ExternalCommand.replaceString(vars) _
-    val resolvedArgMappings =
-      argMappings.view.mapValues(_.resolve(replace).firstMatch).toMap
-    val resolvedArgs = args.map(replace).flatMap { arg =>
-      resolvedArgMappings
-        .find(e => pattern(e._1.id) == arg)
-        .map(_._2)
-        .getOrElse(List(arg))
-    }
-
-    SysCmd(replace(program), resolvedArgs: _*)
-      .withTimeout(timeout)
-      .withEnv(_ => Env(env).modifyValue(replace))
-  }
-}
-
-object ExternalCommand {
-  private val openPattern = "{{"
-  private val closePattern = "}}"
-
-  private def pattern(s: String): String = s"${openPattern}${s}${closePattern}"
-
-  def apply(program: String, args: Seq[String], timeout: Duration): ExternalCommand =
-    ExternalCommand(program, args, timeout, Map.empty, Map.empty)
-
-  final case class ArgMapping(
-      value: String,
-      mappings: List[ArgMatch]
-  ) {
-    private[exec] def resolve(replace: String => String): ArgMapping =
-      ArgMapping(replace(value), mappings.map(_.resolve(replace)))
-
-    def firstMatch: List[String] =
-      mappings.find(am => value.matches(am.matches)).map(_.args).getOrElse(Nil)
-  }
-
-  final case class ArgMatch(
-      matches: String,
-      args: List[String]
-  ) {
-    private[exec] def resolve(replace: String => String): ArgMatch =
-      ArgMatch(replace(matches), args.map(replace))
-  }
-
-  private def replaceString(vars: Map[String, String])(in: String): String =
-    vars.foldLeft(in) { case (result, (name, value)) =>
-      val key = s"{{$name}}"
-      result.replace(key, value)
-    }
-
-  final case class WithVars(cmd: ExternalCommand, vars: Map[String, String]) {
-    def resolved: SysCmd = cmd.resolve(vars)
-    def append(more: (String, String)*): WithVars =
-      WithVars(cmd, vars ++ more.toMap)
-
-    def withVar(key: String, value: String): WithVars =
-      WithVars(cmd, vars.updated(key, value))
-
-    def withVarOption(key: String, value: Option[String]): WithVars =
-      value.map(withVar(key, _)).getOrElse(this)
-  }
-}
@@ -38,20 +38,6 @@ trait SysExec[F[_]] {

  def waitFor(timeout: Option[Duration] = None): F[Int]

-  /** Uses `waitFor` and throws when return code is non-zero. Logs stderr and stdout while
-    * waiting.
-    */
-  def runToSuccess(logger: Logger[F], timeout: Option[Duration] = None)(implicit
-      F: Async[F]
-  ): F[Int]
-
-  /** Uses `waitFor` and throws when return code is non-zero. Logs stderr while waiting
-    * and collects stdout once finished successfully.
-    */
-  def runToSuccessStdout(logger: Logger[F], timeout: Option[Duration] = None)(implicit
-      F: Async[F]
-  ): F[String]
-
  /** Sends a signal to the process to terminate it immediately */
  def cancel: F[Unit]

@@ -89,12 +75,6 @@ object SysExec {
      proc <- startProcess(logger, cmd, workdir, stdin)
      fibers <- Resource.eval(Ref.of[F, List[F[Unit]]](Nil))
    } yield new SysExec[F] {
-      private lazy val basicName: String =
-        cmd.program.lastIndexOf(java.io.File.separatorChar.toInt) match {
-          case n if n > 0 => cmd.program.drop(n + 1)
-          case _ => cmd.program.takeRight(16)
-        }
-
      def stdout: Stream[F, Byte] =
        fs2.io.readInputStream(
          Sync[F].blocking(proc.getInputStream),
@@ -127,39 +107,6 @@ object SysExec {
        )
      }

-      def runToSuccess(logger: Logger[F], timeout: Option[Duration])(implicit
-          F: Async[F]
-      ): F[Int] =
-        logOutputs(logger, basicName).use(_.waitFor(timeout).flatMap {
-          case rc if rc == 0 => Sync[F].pure(0)
-          case rc =>
-            Sync[F].raiseError(
-              new Exception(s"Command `${cmd.program}` returned non-zero exit code ${rc}")
-            )
-        })
-
-      def runToSuccessStdout(logger: Logger[F], timeout: Option[Duration])(implicit
-          F: Async[F]
-      ): F[String] =
-        F.background(
-          stderrLines
-            .through(line => Stream.eval(logger.debug(s"[$basicName (err)]: $line")))
-            .compile
-            .drain
-        ).use { f1 =>
-          waitFor(timeout)
-            .flatMap {
-              case rc if rc == 0 => stdout.through(fs2.text.utf8.decode).compile.string
-              case rc =>
-                Sync[F].raiseError[String](
-                  new Exception(
-                    s"Command `${cmd.program}` returned non-zero exit code ${rc}"
-                  )
-                )
-            }
-            .flatTap(_ => f1)
-        }
-
      def consumeOutputs(out: Pipe[F, String, Unit], err: Pipe[F, String, Unit])(implicit
          F: Async[F]
      ): Resource[F, SysExec[F]] =
@@ -6,7 +6,6 @@

 package docspell.common.util

-import cats.data.OptionT
 import cats.effect._
 import cats.syntax.all._
 import cats.{Applicative, Monad}
@@ -27,10 +26,10 @@ object Directory {
    (dir :: dirs.toList).traverse_(Files[F].createDirectories(_))

  def nonEmpty[F[_]: Files: Sync](dir: Path): F[Boolean] =
-    OptionT
-      .whenM(Files[F].isDirectory(dir))(Files[F].list(dir).take(1).compile.toList)
-      .map(_.nonEmpty)
-      .isDefined
+    List(
+      Files[F].isDirectory(dir),
+      Files[F].list(dir).take(1).compile.last.map(_.isDefined)
+    ).sequence.map(_.forall(identity))

  def isEmpty[F[_]: Files: Sync](dir: Path): F[Boolean] =
    nonEmpty(dir).map(b => !b)
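
The `nonEmpty`/`isEmpty` helpers touched above are plain effectful checks. A minimal usage sketch follows; the path is made up, only the two calls come from this hunk.

import cats.effect.IO
import fs2.io.file.Path
import docspell.common.util.Directory

// Hypothetical call site: check whether /tmp/addons is a directory containing
// at least one entry, and its negation.
val hasEntries: IO[Boolean] = Directory.nonEmpty[IO](Path("/tmp/addons"))
val empty: IO[Boolean] = Directory.isEmpty[IO](Path("/tmp/addons"))
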
@@ -1,74 +0,0 @@
-/*
- * Copyright 2020 Eike K. & Contributors
- *
- * SPDX-License-Identifier: AGPL-3.0-or-later
- */
-
-package docspell.common.exec
-
-import docspell.common.Duration
-import docspell.common.Ident
-import docspell.common.exec.Args
-import docspell.common.exec.Env
-import docspell.common.exec.ExternalCommand._
-import docspell.common.exec.SysCmd
-
-import munit.FunSuite
-
-class ExternalCommandTest extends FunSuite {
-
-  test("resolve") {
-    val cmd = ExternalCommand(
-      program = "tesseract",
-      args = "{{infile}}" :: "{{lang-spec}}" :: "out" :: "pdf" :: "txt" :: Nil,
-      timeout = Duration.minutes(5),
-      env = Map.empty,
-      argMappings = Map(
-        Ident.unsafe("lang-spec") -> ArgMapping(
-          value = "{{lang}}",
-          mappings = List(
-            ArgMatch(
-              matches = "jpn_vert",
-              args = List("-l", "jpn_vert", "-c", "preserve_interword_spaces=1")
-            ),
-            ArgMatch(
-              matches = ".*",
-              args = List("-l", "{{lang}}")
-            )
-          )
-        )
-      )
-    )
-
-    val varsDe = Map("lang" -> "de", "encoding" -> "UTF_8", "infile" -> "text.jpg")
-    assertEquals(
-      cmd.resolve(varsDe),
-      SysCmd(
-        "tesseract",
-        Args.of("text.jpg", "-l", "de", "out", "pdf", "txt"),
-        Env.empty,
-        Duration.minutes(5)
-      )
-    )
-
-    val varsJpnVert = varsDe.updated("lang", "jpn_vert")
-    assertEquals(
-      cmd.resolve(varsJpnVert),
-      SysCmd(
-        "tesseract",
-        Args.of(
-          "text.jpg",
-          "-l",
-          "jpn_vert",
-          "-c",
-          "preserve_interword_spaces=1",
-          "out",
-          "pdf",
-          "txt"
-        ),
-        Env.empty,
-        Duration.minutes(5)
-      )
-    )
-  }
-}
@@ -16,7 +16,7 @@ import munit.CatsEffectSuite

 class DirectoryTest extends CatsEffectSuite with TestLoggingConfig {
   val logger = docspell.logging.getLogger[IO]
-  val tempDir = ResourceFunFixture(
+  val tempDir = ResourceFixture(
     Files[IO].tempDirectory(Path("target").some, "directory-test-", None)
   )

@@ -11,8 +11,7 @@ import cats.implicits._
 import fs2.io.file.{Files, Path}
 import fs2.{Pipe, Stream}

-import docspell.common.exec.ExternalCommand
-import docspell.common.exec.SysExec
+import docspell.common._
 import docspell.common.util.File
 import docspell.convert.ConversionResult
 import docspell.convert.ConversionResult.{Handler, successPdf, successPdfTxt}
@@ -22,11 +21,11 @@ private[extern] object ExternConv {

  def toPDF[F[_]: Async: Files, A](
      name: String,
-      cmdCfg: ExternalCommand.WithVars,
+      cmdCfg: SystemCommand.Config,
      wd: Path,
      useStdin: Boolean,
      logger: Logger[F],
-      reader: (Path, Int) => F[ConversionResult[F]]
+      reader: (Path, SystemCommand.Result) => F[ConversionResult[F]]
  )(in: Stream[F, Byte], handler: Handler[F, A]): F[A] =
    Stream
      .resource(File.withTempDir[F](wd, s"docspell-$name"))
@@ -34,21 +33,32 @@ private[extern] object ExternConv {
        val inFile = dir.resolve("infile").absolute.normalize
        val out = dir.resolve("out.pdf").absolute.normalize
        val sysCfg =
-          cmdCfg
-            .withVar("outfile", out.toString)
-            .withVarOption("infile", Option.when(!useStdin)(inFile.toString))
-            .resolved
+          cmdCfg.replace(
+            Map(
+              "{{outfile}}" -> out.toString
+            ) ++
+              (if (!useStdin) Map("{{infile}}" -> inFile.toString)
+               else Map.empty)
+          )

        val createInput: Pipe[F, Byte, Unit] =
          if (useStdin) _ => Stream.emit(())
          else storeDataToFile(name, logger, inFile)

-        in.through(createInput).evalMap { _ =>
-          SysExec(sysCfg, logger, Some(dir), Option.when(useStdin)(in))
-            .flatMap(_.logOutputs(logger, name))
-            .use { proc =>
-              proc.waitFor().flatMap(rc => reader(out, rc).flatMap(handler.run))
-            }
+        in.through(createInput).flatMap { _ =>
+          SystemCommand
+            .exec[F](
+              sysCfg,
+              logger,
+              Some(dir),
+              if (useStdin) in
+              else Stream.empty
+            )
+            .evalMap(result =>
+              logResult(name, result, logger)
+                .flatMap(_ => reader(out, result))
+                .flatMap(handler.run)
+            )
        }
      }
      .compile
@@ -64,9 +74,9 @@ private[extern] object ExternConv {
def readResult[F[_]: Async: Files](
chunkSize: Int,
logger: Logger[F]
-)(out: Path, result: Int): F[ConversionResult[F]] =
+)(out: Path, result: SystemCommand.Result): F[ConversionResult[F]] =
File.existsNonEmpty[F](out).flatMap {
-case true if result == 0 =>
+case true if result.rc == 0 =>
val outTxt = out.resolveSibling(out.fileName.toString + ".txt")
File.existsNonEmpty[F](outTxt).flatMap {
case true =>
@@ -78,13 +88,13 @@ private[extern] object ExternConv {
successPdf(File.readAll(out, chunkSize)).pure[F]
}
case true =>
-logger.warn(s"Command not successful (rc=${result}), but file exists.") *>
+logger.warn(s"Command not successful (rc=${result.rc}), but file exists.") *>
successPdf(File.readAll(out, chunkSize)).pure[F]

case false =>
ConversionResult
.failure[F](
-new Exception(s"Command result=${result}. No output file found.")
+new Exception(s"Command result=${result.rc}. No output file found.")
)
.pure[F]
}
@@ -93,25 +103,25 @@ private[extern] object ExternConv {
outPrefix: String,
chunkSize: Int,
logger: Logger[F]
-)(out: Path, result: Int): F[ConversionResult[F]] = {
+)(out: Path, result: SystemCommand.Result): F[ConversionResult[F]] = {
val outPdf = out.resolveSibling(s"$outPrefix.pdf")
File.existsNonEmpty[F](outPdf).flatMap {
case true =>
val outTxt = out.resolveSibling(s"$outPrefix.txt")
File.exists(outTxt).flatMap { txtExists =>
val pdfData = File.readAll(out, chunkSize)
-if (result == 0)
+if (result.rc == 0)
if (txtExists) successPdfTxt(pdfData, File.readText(outTxt)).pure[F]
else successPdf(pdfData).pure[F]
else
-logger.warn(s"Command not successful (rc=${result}), but file exists.") *>
+logger.warn(s"Command not successful (rc=${result.rc}), but file exists.") *>
successPdf(pdfData).pure[F]
}

case false =>
ConversionResult
.failure[F](
-new Exception(s"Command result=${result}. No output file found.")
+new Exception(s"Command result=${result.rc}. No output file found.")
)
.pure[F]
}
@@ -128,6 +138,14 @@ private[extern] object ExternConv {
.drain ++
Stream.eval(storeFile(in, inFile))

+private def logResult[F[_]: Sync](
+name: String,
+result: SystemCommand.Result,
+logger: Logger[F]
+): F[Unit] =
+logger.debug(s"$name stdout: ${result.stdout}") *>
+logger.debug(s"$name stderr: ${result.stderr}")
+
private def storeFile[F[_]: Async: Files](
in: Stream[F, Byte],
target: Path
@@ -24,16 +24,14 @@ object OcrMyPdf {
logger: Logger[F]
)(in: Stream[F, Byte], handler: Handler[F, A]): F[A] =
if (cfg.enabled) {
-val reader: (Path, Int) => F[ConversionResult[F]] =
+val reader: (Path, SystemCommand.Result) => F[ConversionResult[F]] =
ExternConv.readResult[F](chunkSize, logger)

-val cmd = cfg.command.withVars(Map("lang" -> lang.iso3))
-
ExternConv.toPDF[F, A](
"ocrmypdf",
-cmd,
+cfg.command.replace(Map("{{lang}}" -> lang.iso3)),
cfg.workingDir,
-useStdin = false,
+false,
logger,
reader
)(in, handler)
@@ -8,10 +8,10 @@ package docspell.convert.extern

import fs2.io.file.Path

-import docspell.common.exec.ExternalCommand
+import docspell.common.SystemCommand

case class OcrMyPdfConfig(
enabled: Boolean,
-command: ExternalCommand,
+command: SystemCommand.Config,
workingDir: Path
)
@@ -24,18 +24,17 @@ object Tesseract {
logger: Logger[F]
)(in: Stream[F, Byte], handler: Handler[F, A]): F[A] = {
val outBase = cfg.command.args.tail.headOption.getOrElse("out")
-val reader: (Path, Int) => F[ConversionResult[F]] =
+val reader: (Path, SystemCommand.Result) => F[ConversionResult[F]] =
ExternConv.readResultTesseract[F](outBase, chunkSize, logger)

-val cmd = cfg.command.withVars(Map("lang" -> lang.iso3))
-
ExternConv.toPDF[F, A](
"tesseract",
-cmd,
+cfg.command.replace(Map("{{lang}}" -> lang.iso3)),
cfg.workingDir,
-useStdin = false,
+false,
logger,
reader
)(in, handler)
}

}
@@ -8,6 +8,6 @@ package docspell.convert.extern

import fs2.io.file.Path

-import docspell.common.exec.ExternalCommand
+import docspell.common.SystemCommand

-case class TesseractConfig(command: ExternalCommand, workingDir: Path)
+case class TesseractConfig(command: SystemCommand.Config, workingDir: Path)
@@ -10,6 +10,7 @@ import cats.effect._
import fs2.Stream
import fs2.io.file.{Files, Path}

+import docspell.common._
import docspell.convert.ConversionResult
import docspell.convert.ConversionResult.Handler
import docspell.logging.Logger
@@ -21,15 +22,14 @@ object Unoconv {
chunkSize: Int,
logger: Logger[F]
)(in: Stream[F, Byte], handler: Handler[F, A]): F[A] = {
-val reader: (Path, Int) => F[ConversionResult[F]] =
+val reader: (Path, SystemCommand.Result) => F[ConversionResult[F]] =
ExternConv.readResult[F](chunkSize, logger)
-val cmd = cfg.command.withVars(Map.empty)

ExternConv.toPDF[F, A](
"unoconv",
-cmd,
+cfg.command,
cfg.workingDir,
-useStdin = false,
+false,
logger,
reader
)(
@@ -37,4 +37,5 @@ object Unoconv {
handler
)
}
+
}
@@ -8,6 +8,6 @@ package docspell.convert.extern

import fs2.io.file.Path

-import docspell.common.exec.ExternalCommand
+import docspell.common.SystemCommand

-case class UnoconvConfig(command: ExternalCommand, workingDir: Path)
+case class UnoconvConfig(command: SystemCommand.Config, workingDir: Path)
@@ -27,10 +27,10 @@ object Weasyprint {
sanitizeHtml: SanitizeHtml,
logger: Logger[F]
)(in: Stream[F, Byte], handler: Handler[F, A]): F[A] = {
-val reader: (Path, Int) => F[ConversionResult[F]] =
+val reader: (Path, SystemCommand.Result) => F[ConversionResult[F]] =
ExternConv.readResult[F](chunkSize, logger)

-val cmdCfg = cfg.command.withVars(Map("encoding" -> charset.name()))
+val cmdCfg = cfg.command.replace(Map("{{encoding}}" -> charset.name()))

// html sanitize should (among other) remove links to invalid
// protocols like cid: which is not supported by further
@@ -51,4 +51,5 @@ object Weasyprint {
handler
)
}
+
}
@@ -8,6 +8,6 @@ package docspell.convert.extern

import fs2.io.file.Path

-import docspell.common.exec.ExternalCommand
+import docspell.common.SystemCommand

-case class WeasyprintConfig(command: ExternalCommand, workingDir: Path)
+case class WeasyprintConfig(command: SystemCommand.Config, workingDir: Path)
@@ -27,10 +27,10 @@ object WkHtmlPdf {
sanitizeHtml: SanitizeHtml,
logger: Logger[F]
)(in: Stream[F, Byte], handler: Handler[F, A]): F[A] = {
-val reader: (Path, Int) => F[ConversionResult[F]] =
+val reader: (Path, SystemCommand.Result) => F[ConversionResult[F]] =
ExternConv.readResult[F](chunkSize, logger)

-val cmdCfg = cfg.command.withVars(Map("encoding" -> charset.name()))
+val cmdCfg = cfg.command.replace(Map("{{encoding}}" -> charset.name()))

// html sanitize should (among other) remove links to invalid
// protocols like cid: which is not supported by further
@@ -58,4 +58,5 @@ object WkHtmlPdf {
handler
)
}
+
}
@@ -8,6 +8,6 @@ package docspell.convert.extern

import fs2.io.file.Path

-import docspell.common.exec.ExternalCommand
+import docspell.common.SystemCommand

-case class WkHtmlPdfConfig(command: ExternalCommand, workingDir: Path)
+case class WkHtmlPdfConfig(command: SystemCommand.Config, workingDir: Path)
@@ -15,7 +15,6 @@ import cats.implicits._
import fs2.Stream

import docspell.common._
-import docspell.common.exec._
import docspell.common.util.File
import docspell.convert.ConversionResult.Handler
import docspell.convert.ConvertConfig.HtmlConverter
@@ -37,7 +36,7 @@ class ConversionTest extends FunSuite with FileChecks with TestLoggingConfig {
3000 * 3000,
MarkdownConfig("body { padding: 2em 5em; }"),
WkHtmlPdfConfig(
-ExternalCommand(
+SystemCommand.Config(
"wkhtmltopdf",
Seq("-s", "A4", "--encoding", "UTF-8", "-", "{{outfile}}"),
Duration.seconds(20)
@@ -45,7 +44,7 @@ class ConversionTest extends FunSuite with FileChecks with TestLoggingConfig {
target
),
WeasyprintConfig(
-ExternalCommand(
+SystemCommand.Config(
"weasyprint",
Seq("--encoding", "UTF-8", "-", "{{outfile}}"),
Duration.seconds(20)
@@ -54,7 +53,7 @@ class ConversionTest extends FunSuite with FileChecks with TestLoggingConfig {
),
HtmlConverter.Wkhtmltopdf,
TesseractConfig(
-ExternalCommand(
+SystemCommand.Config(
"tesseract",
Seq("{{infile}}", "out", "-l", "deu", "pdf", "txt"),
Duration.seconds(20)
@@ -62,7 +61,7 @@ class ConversionTest extends FunSuite with FileChecks with TestLoggingConfig {
target
),
UnoconvConfig(
-ExternalCommand(
+SystemCommand.Config(
"unoconv",
Seq("-f", "pdf", "-o", "{{outfile}}", "{{infile}}"),
Duration.seconds(20)
@@ -70,8 +69,8 @@ class ConversionTest extends FunSuite with FileChecks with TestLoggingConfig {
target
),
OcrMyPdfConfig(
-enabled = true,
-ExternalCommand(
+true,
+SystemCommand.Config(
"ocrmypdf",
Seq(
"-l",
@@ -87,7 +86,7 @@ class ConversionTest extends FunSuite with FileChecks with TestLoggingConfig {
),
target
),
-ConvertConfig.DecryptPdf(enabled = true, Nil)
+ConvertConfig.DecryptPdf(true, Nil)
)

val conversion =
@@ -14,7 +14,6 @@ import cats.effect.unsafe.implicits.global
import fs2.io.file.Path

import docspell.common._
-import docspell.common.exec._
import docspell.common.util.File
import docspell.convert._
import docspell.files.ExampleFiles
@@ -28,7 +27,7 @@ class ExternConvTest extends FunSuite with FileChecks with TestLoggingConfig {
val target = File.path(Paths.get("target"))

test("convert html to pdf") {
-val cfg = ExternalCommand(
+val cfg = SystemCommand.Config(
"wkhtmltopdf",
Seq("-s", "A4", "--encoding", "UTF-8", "-", "{{outfile}}"),
Duration.seconds(20)
@@ -54,7 +53,7 @@ class ExternConvTest extends FunSuite with FileChecks with TestLoggingConfig {
}

test("convert office to pdf") {
-val cfg = ExternalCommand(
+val cfg = SystemCommand.Config(
"unoconv",
Seq("-f", "pdf", "-o", "{{outfile}}", "{{infile}}"),
Duration.seconds(20)
@@ -81,7 +80,7 @@ class ExternConvTest extends FunSuite with FileChecks with TestLoggingConfig {
}

test("convert image to pdf") {
-val cfg = ExternalCommand(
+val cfg = SystemCommand.Config(
"tesseract",
Seq("{{infile}}", "out", "-l", "deu", "pdf", "txt"),
Duration.seconds(20)
@@ -106,4 +105,5 @@ class ExternConvTest extends FunSuite with FileChecks with TestLoggingConfig {
)
.unsafeRunSync()
}
+
}
@@ -10,8 +10,7 @@ import cats.effect._
import fs2.Stream
import fs2.io.file.{Files, Path}

-import docspell.common.exec.ExternalCommand
-import docspell.common.exec.SysExec
+import docspell.common._
import docspell.common.util.File
import docspell.logging.Logger

@@ -78,17 +77,14 @@ object Ocr {
else cfg.ghostscript.command.args
val cmd = cfg.ghostscript.command
.copy(args = xargs)
-.withVars(
+.replace(
Map(
-"infile" -> "-",
-"outfile" -> "%d.tif"
+"{{infile}}" -> "-",
+"{{outfile}}" -> "%d.tif"
)
)
-.resolved
-Stream
-.resource(SysExec(cmd, logger, Some(wd), Some(pdf)))
-.evalMap(_.runToSuccess(logger))
+SystemCommand
+.execSuccess(cmd, logger, wd = Some(wd), stdin = pdf)
.flatMap(_ => File.listFiles(pathEndsWith(".tif"), wd))
}

@@ -97,22 +93,18 @@ object Ocr {
*/
private[extract] def runGhostscriptFile[F[_]: Async: Files](
pdf: Path,
-ghostscript: ExternalCommand,
+ghostscript: SystemCommand.Config,
wd: Path,
logger: Logger[F]
): Stream[F, Path] = {
-val cmd = ghostscript
-.withVars(
-Map(
-"infile" -> pdf.absolute.toString,
-"outfile" -> "%d.tif"
-)
+val cmd = ghostscript.replace(
+Map(
+"{{infile}}" -> pdf.absolute.toString,
+"{{outfile}}" -> "%d.tif"
)
-.resolved
-Stream
-.resource(SysExec(cmd, logger, Some(wd)))
-.evalMap(_.runToSuccess(logger))
+)
+SystemCommand
+.execSuccess[F](cmd, logger, wd = Some(wd))
.flatMap(_ => File.listFiles(pathEndsWith(".tif"), wd))
}

@@ -124,23 +116,19 @@ object Ocr {
*/
private[extract] def runUnpaperFile[F[_]: Async](
img: Path,
-unpaper: ExternalCommand,
+unpaper: SystemCommand.Config,
wd: Option[Path],
logger: Logger[F]
): Stream[F, Path] = {
val targetFile = img.resolveSibling("u-" + img.fileName.toString).absolute
-val cmd = unpaper
-.withVars(
-Map(
-"infile" -> img.absolute.toString,
-"outfile" -> targetFile.toString
-)
+val cmd = unpaper.replace(
+Map(
+"{{infile}}" -> img.absolute.toString,
+"{{outfile}}" -> targetFile.toString
)
-.resolved
-Stream
-.resource(SysExec(cmd, logger, wd))
-.evalMap(_.runToSuccess(logger))
+)
+SystemCommand
+.execSuccess[F](cmd, logger, wd = wd)
.map(_ => targetFile)
.handleErrorWith { th =>
logger
@@ -162,14 +150,12 @@ object Ocr {
// so use the parent as working dir
runUnpaperFile(img, config.unpaper.command, img.parent, logger).flatMap { uimg =>
val cmd = config.tesseract.command
-.withVars(
-Map("file" -> uimg.fileName.toString, "lang" -> fixLanguage(lang))
+.replace(
+Map("{{file}}" -> uimg.fileName.toString, "{{lang}}" -> fixLanguage(lang))
)
-.resolved
-Stream
-.resource(SysExec(cmd, logger, uimg.parent))
-.evalMap(_.runToSuccessStdout(logger))
+SystemCommand
+.execSuccess[F](cmd, logger, wd = uimg.parent)
+.map(_.stdout)
}

/** Run tesseract on the given image file and return the extracted text. */
@@ -180,12 +166,8 @@ object Ocr {
config: OcrConfig
): Stream[F, String] = {
val cmd = config.tesseract.command
-.withVars(Map("file" -> "stdin", "lang" -> fixLanguage(lang)))
-.resolved
-
-Stream
-.resource(SysExec(cmd, logger, None, Some(img)))
-.evalMap(_.runToSuccessStdout(logger))
+.replace(Map("{{file}}" -> "stdin", "{{lang}}" -> fixLanguage(lang)))
+SystemCommand.execSuccess(cmd, logger, stdin = img).map(_.stdout)
}

private def fixLanguage(lang: String): String =
@@ -6,9 +6,12 @@

package docspell.extract.ocr

+import java.nio.file.Paths
+
import fs2.io.file.Path

-import docspell.common.exec.ExternalCommand
+import docspell.common._
+import docspell.common.util.File

case class OcrConfig(
maxImageSize: Int,
@@ -22,10 +25,43 @@ object OcrConfig {

case class PageRange(begin: Int)

-case class Ghostscript(command: ExternalCommand, workingDir: Path)
+case class Ghostscript(command: SystemCommand.Config, workingDir: Path)

-case class Tesseract(command: ExternalCommand)
+case class Tesseract(command: SystemCommand.Config)

-case class Unpaper(command: ExternalCommand)
+case class Unpaper(command: SystemCommand.Config)

+val default = OcrConfig(
+maxImageSize = 3000 * 3000,
+pageRange = PageRange(10),
+ghostscript = Ghostscript(
+SystemCommand.Config(
+"gs",
+Seq(
+"-dNOPAUSE",
+"-dBATCH",
+"-dSAFER",
+"-sDEVICE=tiffscaled8",
+"-sOutputFile={{outfile}}",
+"{{infile}}"
+),
+Duration.seconds(30)
+),
+File.path(
+Paths.get(System.getProperty("java.io.tmpdir")).resolve("docspell-extraction")
+)
+),
+unpaper = Unpaper(
+SystemCommand
+.Config("unpaper", Seq("{{infile}}", "{{outfile}}"), Duration.seconds(30))
+),
+tesseract = Tesseract(
+SystemCommand
+.Config(
+"tesseract",
+Seq("{{file}}", "stdout", "-l", "{{lang}}"),
+Duration.minutes(1)
+)
+)
+)
}
@@ -6,14 +6,9 @@

package docspell.extract.ocr

-import java.nio.file.Paths
-
import cats.effect.IO
import cats.effect.unsafe.implicits.global

-import docspell.common.Duration
-import docspell.common.exec.ExternalCommand
-import docspell.common.util.File
import docspell.files.TestFiles
import docspell.logging.TestLoggingConfig

@@ -26,7 +21,7 @@ class TextExtractionSuite extends FunSuite with TestLoggingConfig {

test("extract english pdf".ignore) {
val text = TextExtract
-.extract[IO](letterSourceEN, logger, "eng", TextExtractionSuite.defaultConfig)
+.extract[IO](letterSourceEN, logger, "eng", OcrConfig.default)
.compile
.lastOrError
.unsafeRunSync()
@@ -36,7 +31,7 @@ class TextExtractionSuite extends FunSuite with TestLoggingConfig {
test("extract german pdf".ignore) {
val expect = TestFiles.letterDEText
val extract = TextExtract
-.extract[IO](letterSourceDE, logger, "deu", TextExtractionSuite.defaultConfig)
+.extract[IO](letterSourceDE, logger, "deu", OcrConfig.default)
.compile
.lastOrError
.unsafeRunSync()
@@ -44,37 +39,3 @@
assertEquals(extract.value, expect)
}
}
-
-object TextExtractionSuite {
-val defaultConfig = OcrConfig(
-maxImageSize = 3000 * 3000,
-pageRange = OcrConfig.PageRange(10),
-ghostscript = OcrConfig.Ghostscript(
-ExternalCommand(
-"gs",
-Seq(
-"-dNOPAUSE",
-"-dBATCH",
-"-dSAFER",
-"-sDEVICE=tiffscaled8",
-"-sOutputFile={{outfile}}",
-"{{infile}}"
-),
-Duration.seconds(30)
-),
-File.path(
-Paths.get(System.getProperty("java.io.tmpdir")).resolve("docspell-extraction")
-)
-),
-unpaper = OcrConfig.Unpaper(
-ExternalCommand("unpaper", Seq("{{infile}}", "{{outfile}}"), Duration.seconds(30))
-),
-tesseract = OcrConfig.Tesseract(
-ExternalCommand(
-"tesseract",
-Seq("{{file}}", "stdout", "-l", "{{lang}}"),
-Duration.minutes(1)
-)
-)
-)
-}
@@ -19,7 +19,7 @@ import munit._

class ZipTest extends CatsEffectSuite with TestLoggingConfig {
val logger = docspell.logging.getLogger[IO]
-val tempDir = ResourceFunFixture(
+val tempDir = ResourceFixture(
Files[IO].tempDirectory(Path("target").some, "zip-test-", None)
)

@@ -201,7 +201,6 @@ object FtsRepository extends DoobieMeta {
case Language.Czech => "simple"
case Language.Latvian => "simple"
case Language.Japanese => "simple"
-case Language.JpnVert => "simple"
case Language.Hebrew => "simple"
case Language.Lithuanian => "simple"
case Language.Polish => "simple"
@@ -45,7 +45,7 @@ object SolrMigration {
description,
FtsMigration.Result.reIndexAll.pure[F]
),
-dataChangeOnly = true
+true
)

def indexAll[F[_]: Applicative](
@@ -59,7 +59,7 @@ object SolrMigration {
description,
FtsMigration.Result.indexAll.pure[F]
),
-dataChangeOnly = true
+true
)

def apply[F[_]: Functor](
@@ -74,6 +74,6 @@ object SolrMigration {
description,
task.map(_ => FtsMigration.Result.workDone)
),
-dataChangeOnly = false
+false
)
}
@@ -299,22 +299,14 @@ object SolrSetup {
Map("add-field" -> body.asJson).asJson

def string(field: Field): AddField =
-AddField(field, "string", stored = true, indexed = true, multiValued = false)
+AddField(field, "string", true, true, false)

def textGeneral(field: Field): AddField =
-AddField(field, "text_general", stored = true, indexed = true, multiValued = false)
+AddField(field, "text_general", true, true, false)

def textLang(field: Field, lang: Language): AddField =
-if (lang == Language.Czech)
-AddField(field, s"text_cz", stored = true, indexed = true, multiValued = false)
-else
-AddField(
-field,
-s"text_${lang.iso2}",
-stored = true,
-indexed = true,
-multiValued = false
-)
+if (lang == Language.Czech) AddField(field, s"text_cz", true, true, false)
+else AddField(field, s"text_${lang.iso2}", true, true, false)
}

case class DeleteField(name: Field)
Some files were not shown because too many files have changed in this diff.