Compare commits

266 Commits

Author SHA1 Message Date
32cc994b30 Merge branch 'master' into helm-chart 2024-07-30 10:36:41 +02:00
f500cebab4 Update dependency tailwindcss to v3.4.7 2024-07-25 20:38:44 +00:00
4c786d4893 Update dependency postcss to v8.4.40 2024-07-25 01:37:28 +00:00
57a0a52b0b Update alpine Docker tag to v3.20.2 2024-07-23 05:26:11 +00:00
c584c397f0 Merge pull request #2708 from eikek/update/mariadb-java-client-3.4.1
Update mariadb-java-client to 3.4.1
2024-07-18 06:21:49 +00:00
723241f17f Update mariadb-java-client to 3.4.1 2024-07-18 06:13:28 +00:00
54afdeb934 Update dependency @fortawesome/fontawesome-free to v6.6.0 2024-07-17 02:11:37 +00:00
a7510c02f5 Update dependency tailwindcss to v3.4.6 2024-07-16 22:29:42 +00:00
9306467583 Update dependency tailwindcss to v3.4.5 2024-07-15 22:23:29 +00:00
68caf20e24 Merge pull request #2703 from eikek/update/circe-yaml-0.15.3
Update circe-yaml to 0.15.3
2024-07-12 06:20:39 +00:00
e178a7359f Update circe-yaml to 0.15.3 2024-07-12 06:12:25 +00:00
b871803415 Merge pull request #2702 from eikek/update/jsoup-1.18.1
Update jsoup to 1.18.1
2024-07-11 06:20:58 +00:00
191357f249 Update jsoup to 1.18.1 2024-07-11 06:12:47 +00:00
294b04e590 Merge pull request #2699 from ivanbrennan/nix-secure-config
Nix module: secure config file
2024-07-08 09:59:52 +02:00
f0f8d907df Bump braces from 3.0.2 to 3.0.3 in /modules/webapp (#2684)
Bumps [braces](https://github.com/micromatch/braces) from 3.0.2 to 3.0.3.
- [Changelog](https://github.com/micromatch/braces/blob/master/CHANGELOG.md)
- [Commits](https://github.com/micromatch/braces/compare/3.0.2...3.0.3)

---
updated-dependencies:
- dependency-name: braces
  dependency-type: indirect
...

Signed-off-by: dependabot[bot] <support@github.com>
Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com>
2024-07-08 09:42:31 +02:00
9d6cf21819 Merge pull request #2700 from eikek/update/sbt-1.10.1
Update sbt to 1.10.1
2024-07-08 06:22:04 +00:00
f1d6b8efb0 Update sbt to 1.10.1 2024-07-08 06:13:35 +00:00
baf5c682b0 secure nix config
Stop writing docspell config files to the world-readable nix store,
since they contain sensitive info, e.g. database passwords.

Additionally, provide a `configFile` option so users may point to a file
they've secured using their preferred secret management strategy.
2024-07-05 19:16:53 -04:00
f626ee82e6 Update dependency cssnano to v7.0.4 2024-07-05 12:27:19 +00:00
ba06e851bd Update dependency postcss to v8.4.39 2024-06-29 21:51:59 +00:00
4549d62142 Merge pull request #2693 from eikek/update/sbt-sonatype-3.11.0
Update sbt-sonatype to 3.11.0
2024-06-27 06:21:23 +00:00
f4381e8972 Update sbt-sonatype to 3.11.0 2024-06-27 06:12:28 +00:00
f62df12de8 Merge pull request #2691 from eikek/update/circe-yaml-0.15.2
Update circe-yaml to 0.15.2
2024-06-25 06:20:17 +00:00
0228932379 Update circe-yaml to 0.15.2 2024-06-25 06:11:52 +00:00
9a9aaa5d8e Update alpine Docker tag to v3.20.1 2024-06-21 00:09:19 +00:00
80788708a4 Update dependency cssnano to v7.0.3 2024-06-19 14:57:34 +00:00
e0af3d72e9 Merge pull request #2686 from eikek/update/scalafmt-core-3.8.2
Update scalafmt-core to 3.8.2
2024-06-15 06:22:36 +00:00
5fab35ba0d Add 'Reformat with scalafmt 3.8.2' to .git-blame-ignore-revs 2024-06-15 06:12:35 +00:00
1c566cd518 Reformat with scalafmt 3.8.2
Executed command: scalafmt --non-interactive
2024-06-15 06:12:35 +00:00
11c5a3c612 Update scalafmt-core to 3.8.2 2024-06-15 06:12:19 +00:00
57e55a52d6 Merge pull request #2685 from eikek/update/testcontainers-scala-mariadb-0.41.4
Update testcontainers-scala-mariadb, ... to 0.41.4
2024-06-14 06:21:27 +00:00
bfe7ada178 Update testcontainers-scala-mariadb, ... to 0.41.4 2024-06-14 06:13:14 +00:00
94026346c4 Update actions/checkout action to v4.1.7 2024-06-12 22:50:21 +00:00
4a6412904a Merge pull request #2680 from eikek/update/pureconfig-0.17.7
Update pureconfig, pureconfig-ip4s to 0.17.7
2024-06-10 06:22:14 +00:00
021c98c523 Update pureconfig, pureconfig-ip4s to 0.17.7 2024-06-10 06:13:06 +00:00
3c3aa103fa Merge pull request #2679 from eikek/update/imageio-jpeg-3.11.0
Update imageio-jpeg, imageio-tiff to 3.11.0
2024-06-09 06:20:18 +00:00
cd42f33a6d Update imageio-jpeg, imageio-tiff to 3.11.0 2024-06-09 06:12:12 +00:00
5d1f49b279 Merge pull request #2677 from eikek/update/scala-java-time-2.6.0
Update scala-java-time to 2.6.0
2024-06-08 06:19:48 +00:00
86bbc8298d Update scala-java-time to 2.6.0 2024-06-08 06:11:39 +00:00
62f26bbf59 Update dependency tailwindcss to v3.4.4 2024-06-05 22:27:34 +00:00
1e2d46c643 Update dependency cssnano to v7.0.2 2024-06-05 12:59:57 +00:00
a88e5af64b Merge pull request #2674 from eikek/update/scribe-3.15.0
Update scribe, scribe-slf4j2 to 3.15.0
2024-06-05 06:21:36 +00:00
297977f1aa Update scribe, scribe-slf4j2 to 3.15.0 2024-06-05 06:13:23 +00:00
81aba411a4 Merge pull request #2671 from eikek/update/scribe-3.14.0
Update scribe, scribe-slf4j2 to 3.14.0
2024-06-03 06:21:35 +00:00
768c9f71a8 Update scribe, scribe-slf4j2 to 3.14.0 2024-06-03 06:13:14 +00:00
67cfcb275c Adding CJK and Custom Mapping Documentation (#2669) 2024-06-02 20:57:36 +02:00
3621d3d9b4 Add Japanese Mapping for OCR Optimization (#2668) 2024-06-02 10:37:20 +02:00
f991d6018e Update release-drafter.yml 2024-05-31 09:01:58 +02:00
8131b444ff Merge pull request #2670 from eikek/update/swagger-ui-5.17.14
Update swagger-ui to 5.17.14
2024-05-31 06:21:28 +00:00
f77142899d Update swagger-ui to 5.17.14 2024-05-31 06:13:09 +00:00
faff4308bd Add release drafter config 2024-05-30 21:06:54 +02:00
2aad27791a Update dependency flag-icons to v7.2.3 2024-05-29 15:05:37 +00:00
2fe49fa2b1 Merge pull request #2664 from eikek/update-munit
Update munit to 1.0.0, munit-cats-effect to 2.0.0
2024-05-28 19:22:48 +00:00
b7f53c78d8 Merge pull request #2663 from eikek/fix/2629-collect-output-default
Honor default value `true` for `collectOutput`
2024-05-28 19:15:27 +00:00
9c910d262e Update munit to 1.0.0, munit-cats-effect to 2.0.0 2024-05-28 21:13:44 +02:00
fd2b897f2f Honor default value true for collectOutput
Fixes #2629
2024-05-28 20:56:18 +02:00
9f3f21f627 Merge pull request #2662 from eikek/update/calev-core-0.7.3
Update calev-circe, calev-core, calev-fs2 to 0.7.3
2024-05-28 06:21:38 +00:00
b20e466e43 Update calev-circe, calev-core, calev-fs2 to 0.7.3 2024-05-28 06:12:52 +00:00
2bbeec677a Merge pull request #2660 from eikek/fix/2650-addon-extract
Fix extracting addons with only a single file
2024-05-27 18:56:56 +00:00
2ca492d6cb Unwrap single directory after unzip is complete 2024-05-27 20:39:49 +02:00
62bd9844dd Fix test for non-empty sub directory
Fixes: #2650
2024-05-27 20:39:26 +02:00
870bfd9cf0 Merge pull request #2659 from eikek/fix-command-mappings-config
Move arg-mappings underneath `command` section
2024-05-27 16:02:26 +00:00
172513ce38 Move arg-mappings underneath command section
The argument mappings are part of the command configuration
2024-05-27 17:53:13 +02:00
523022988a Merge pull request #2657 from eikek/update/swagger-ui-5.17.11
Update swagger-ui to 5.17.11
2024-05-24 21:31:44 +00:00
7cdef1915f Merge pull request #2655 from eikek/update/scodec-bits-1.2.0
Update scodec-bits to 1.2.0
2024-05-24 21:26:11 +00:00
a3ad4479cf Merge pull request #2653 from eikek/update/mariadb-java-client-3.4.0
Update mariadb-java-client to 3.4.0
2024-05-24 21:25:54 +00:00
c819735de5 Merge pull request #2651 from eikek/update/sourcecode-0.4.2
Update sourcecode to 0.4.2
2024-05-24 21:25:38 +00:00
22a55e75a9 Merge pull request #2652 from eikek/update/flyway-core-10.13.0
Update flyway-core, ... to 10.13.0
2024-05-24 21:25:24 +00:00
e5758847f6 Update swagger-ui to 5.17.11 2024-05-24 21:16:17 +00:00
ffeffedfbf Update scodec-bits to 1.2.0 2024-05-24 21:16:05 +00:00
7cd96bc59e Update mariadb-java-client to 3.4.0 2024-05-24 21:15:58 +00:00
f315ad32c0 Update flyway-core, ... to 10.13.0 2024-05-24 21:15:55 +00:00
9c8d290fa9 Update sourcecode to 0.4.2 2024-05-24 21:15:51 +00:00
d0681a12e3 Merge pull request #2646 from VTimofeenko/add-logout-url
Add logout-url option to Nix module
2024-05-24 23:05:46 +02:00
a2ae339870 Add auth.on-account-source-conflict 2024-05-24 13:28:44 -07:00
bec485de0b Add logout-url option to Nix module
Closes #2643
2024-05-22 17:18:18 -07:00
3a43ad408c chore(deps): update alpine docker tag to v3.20.0 2024-05-22 23:07:10 +00:00
f635181091 chore(deps): update dependency flag-icons to v7.2.2 2024-05-22 07:01:23 +00:00
bdf5c54ac9 chore(deps): update actions/checkout action to v4.1.6 2024-05-17 04:30:01 +00:00
a90cec4e7b chore(deps): update cachix/install-nix-action action to v27 2024-05-16 02:55:47 +00:00
92589ee2ed Merge pull request #2635 from eikek/update/scribe-3.13.5
Update scribe, scribe-slf4j2 to 3.13.5
2024-05-10 06:34:43 +00:00
997fb60508 Update scribe, scribe-slf4j2 to 3.13.5 2024-05-10 06:26:37 +00:00
021f5a183e chore(deps): update postgres docker tag to v16.3 2024-05-10 04:51:20 +00:00
7969e0daa8 chore(deps): update actions/checkout action to v4.1.5 2024-05-09 00:39:19 +00:00
c935f6b4fc Merge pull request #2631 from eikek/update/jwt-circe-10.0.1
Update jwt-circe to 10.0.1
2024-05-07 06:35:58 +00:00
2834513d92 Update jwt-circe to 10.0.1 2024-05-07 06:27:31 +00:00
66136b7d0f Merge pull request #2630 from VTimofeenko/feat-add-package-option-to-nix-modules
Add package option to Nix modules
2024-05-06 19:57:35 +02:00
004add0dd1 Add package option to Nix modules
"Package" option allows specifying the derivation that will be used for
the systemd service. Works the same way as `services.<name>.package` in
NixOS. By default picks the docspell packages from pkgs instance -- same
behavior as prior to this commit

Closes #2627
2024-05-06 10:22:00 -07:00
adf3a9045e Merge pull request #2628 from eikek/update/io-1.10.0
Update io, sbt to 1.10.0
2024-05-06 06:36:00 +00:00
6896ea0866 Update io, sbt to 1.10.0 2024-05-06 06:27:50 +00:00
3a73eb7948 Merge pull request #2626 from eikek/update/http4s-circe-0.23.27
Update http4s-circe, http4s-dsl, ... to 0.23.27
2024-05-04 06:34:06 +00:00
409ea99b17 Update http4s-circe, http4s-dsl, ... to 0.23.27 2024-05-04 06:25:28 +00:00
f670c5bace Merge pull request #2625 from eikek/update/scala-library-2.13.14
Update scala-library to 2.13.14
2024-05-02 06:36:11 +00:00
a5c8f5d1a8 Merge pull request #2624 from eikek/update/sbt-scalafix-0.12.1
Update sbt-scalafix to 0.12.1
2024-05-02 06:35:43 +00:00
d44f4e7cfa Update scala-library to 2.13.14 2024-05-02 06:27:08 +00:00
775841b7de Update sbt-scalafix to 0.12.1 2024-05-02 06:27:03 +00:00
45c4bf84a1 Merge pull request #2622 from eikek/update/flyway-core-10.12.0
Update flyway-core, ... to 10.12.0
2024-04-30 06:35:38 +00:00
1485e688d3 Update flyway-core, ... to 10.12.0 2024-04-30 06:27:25 +00:00
77f186f320 chore(deps): update dependency cssnano to v7.0.1 2024-04-26 17:28:23 +00:00
95d4393421 Merge pull request #2619 from eikek/update/swagger-ui-5.17.2
Update swagger-ui to 5.17.2
2024-04-26 06:35:57 +00:00
2b4975fc08 Update swagger-ui to 5.17.2 2024-04-26 06:27:57 +00:00
ed86bfe182 chore(deps): update dependency tailwindcss to v3.4.3 2024-04-25 17:45:56 +00:00
f93a6701db chore(deps): update actions/checkout action to v4.1.4 2024-04-25 17:30:15 +00:00
9ada40c634 Merge pull request #2613 from ChanceHarrison/actual-master
docs(development.md): Fix minor typos
2024-04-25 15:30:10 +02:00
2f154146cb Merge remote-tracking branch 'origin/current-docs' 2024-04-25 14:12:28 +02:00
08a71b1bad Add page to website about contributing to docs (#2612) 2024-04-25 14:11:10 +02:00
893386b281 docs(development.md): Fix minor typos 2024-04-25 01:43:04 -07:00
c9f7c685db Merge pull request #2611 from eikek/update/scribe-3.13.4
Update scribe, scribe-slf4j2 to 3.13.4
2024-04-25 06:34:35 +00:00
c9b1720aa5 Update scribe, scribe-slf4j2 to 3.13.4 2024-04-25 06:26:35 +00:00
dcc25805fd Update dependency cssnano to v7 2024-04-25 03:38:18 +00:00
dfe0d8e7bc Merge pull request #2606 from eikek/update/swagger-ui-5.17.0
Update swagger-ui to 5.17.0
2024-04-24 06:37:01 +00:00
e8dada8720 Update swagger-ui to 5.17.0 2024-04-24 06:28:53 +00:00
2a5b7fab12 Update actions/checkout action to v4.1.3 2024-04-22 18:06:44 +00:00
6f46521a28 Update dependency @fontsource/montserrat to v5.0.18 2024-04-20 21:57:25 +00:00
81f386b7c0 Merge pull request #2601 from eikek/update/stanford-corenlp-4.5.7
Update stanford-corenlp to 4.5.7
2024-04-20 06:31:54 +00:00
788ffab63c Update stanford-corenlp to 4.5.7 2024-04-20 06:24:14 +00:00
df370ba221 Merge pull request #2599 from eikek/update/flyway-core-10.11.1
Update flyway-core, ... to 10.11.1
2024-04-19 06:35:25 +00:00
ffa55b9e51 Merge pull request #2598 from eikek/update/scribe-3.13.3
Update scribe, scribe-slf4j2 to 3.13.3
2024-04-19 06:34:19 +00:00
f96a3e6bb9 Update flyway-core, ... to 10.11.1 2024-04-19 06:26:27 +00:00
1425b7d21a Update scribe, scribe-slf4j2 to 3.13.3 2024-04-19 06:26:23 +00:00
230c80cae8 Merge pull request #2597 from eikek/update/swagger-ui-5.15.2-1
Update swagger-ui to 5.15.2-1
2024-04-18 06:34:13 +00:00
c3a7c1347c Update swagger-ui to 5.15.2-1 2024-04-18 06:26:58 +00:00
c16808a3f5 Merge pull request #2595 from eikek/update/icu4j-75.1
Update icu4j to 75.1
2024-04-17 06:37:01 +00:00
5ea4b5c6f2 Merge pull request #2596 from eikek/update/swagger-ui-5.15.2
Update swagger-ui to 5.15.2
2024-04-17 06:35:57 +00:00
21b1590a1d Update swagger-ui to 5.15.2 2024-04-17 06:28:14 +00:00
0b901ea430 Update icu4j to 75.1 2024-04-17 06:28:07 +00:00
e731d822dc Add Japanese Vertical Support Branch for Tesseract and Ocrmypdf OCR (#2505)
* Add Japanese Vertical Support 
* Adds Japanese Vertical mappings to default configuration.
2024-04-16 20:24:57 +02:00
36c00cc9ec Merge pull request #2593 from eikek/update/sourcecode-0.4.1
Update sourcecode to 0.4.1
2024-04-16 06:34:49 +00:00
ca32f24804 Update sourcecode to 0.4.1 2024-04-16 06:25:50 +00:00
5b699fe99d Merge pull request #2591 from eikek/update/sourcecode-0.4.0
Update sourcecode to 0.4.0
2024-04-15 06:34:10 +00:00
fa9c42f4b1 Update sourcecode to 0.4.0 2024-04-15 06:25:10 +00:00
76a55bed7b Merge pull request #2588 from eikek/update/jcl-over-slf4j-2.0.13
Update jcl-over-slf4j to 2.0.13
2024-04-13 06:28:41 +00:00
e6d8a0ca83 Merge pull request #2587 from eikek/update/sbt-native-packager-1.10.0
Update sbt-native-packager to 1.10.0
2024-04-13 06:28:33 +00:00
826930827f Update jcl-over-slf4j to 2.0.13 2024-04-13 06:20:43 +00:00
15b73be1d7 Update sbt-native-packager to 1.10.0 2024-04-13 06:20:39 +00:00
342d4a88df Merge pull request #2586 from eikek/update/swagger-ui-5.15.1
Update swagger-ui to 5.15.1
2024-04-12 06:33:53 +00:00
e97dda23a8 Update swagger-ui to 5.15.1 2024-04-12 06:25:59 +00:00
59182bc38a Merge pull request #2585 from eikek/update/swagger-ui-5.15.0
Update swagger-ui to 5.15.0
2024-04-11 06:33:34 +00:00
9ac3055d20 Update swagger-ui to 5.15.0 2024-04-11 06:25:36 +00:00
a6d1d0e29d Merge pull request #2583 from eikek/update/commons-io-2.16.1
Update commons-io to 2.16.1
2024-04-09 06:35:00 +00:00
9d072f31e0 Update commons-io to 2.16.1 2024-04-09 06:25:47 +00:00
d78b43168f Merge pull request #2579 from eikek/update/tika-core-2.9.2
Update tika-core to 2.9.2
2024-04-03 06:32:53 +00:00
84174056ad Update tika-core to 2.9.2 2024-04-03 06:24:52 +00:00
76523d77b4 Update dependency @fortawesome/fontawesome-free to v6.5.2 2024-04-02 21:36:08 +00:00
7de555c9e6 Merge pull request #2577 from eikek/update/swagger-ui-5.13.0
Update swagger-ui to 5.13.0
2024-03-30 06:35:41 +00:00
a48f311227 Merge pull request #2576 from eikek/update/scalafmt-core-3.8.1
Update scalafmt-core to 3.8.1
2024-03-30 06:32:35 +00:00
231ce0022d Update swagger-ui to 5.13.0 2024-03-30 06:24:48 +00:00
38b4562dc5 Update scalafmt-core to 3.8.1 2024-03-30 06:24:29 +00:00
8001bbde6e Merge pull request #2575 from eikek/update/commons-io-2.16.0
Update commons-io to 2.16.0
2024-03-29 06:33:05 +00:00
e27a4d6662 Merge pull request #2574 from eikek/update/fs2-core-3.10.2
Update fs2-core, fs2-io to 3.10.2
2024-03-29 06:32:54 +00:00
93a320bb72 Update commons-io to 2.16.0 2024-03-29 06:25:04 +00:00
cf4f0738da Update fs2-core, fs2-io to 3.10.2 2024-03-29 06:24:59 +00:00
53efb79cbc Update dependency tailwindcss to v3.4.3 2024-03-28 01:48:09 +00:00
21beefdc39 Update dependency tailwindcss to v3.4.2 2024-03-27 19:32:31 +00:00
7fab54b656 Merge pull request #2569 from eikek/update/swagger-ui-5.12.2
Update swagger-ui to 5.12.2
2024-03-27 06:32:51 +00:00
cdf6a75b4e Update swagger-ui to 5.12.2 2024-03-27 06:24:58 +00:00
e8f2bedecd Update dependency flag-icons to v7.2.1 2024-03-27 01:37:37 +00:00
0b1c924997 Update dependency cssnano to v6.1.2 2024-03-25 21:19:38 +00:00
3132b2af8f Merge pull request #2565 from eikek/update/fs2-core-3.10.1
Update fs2-core, fs2-io to 3.10.1
2024-03-25 06:34:45 +00:00
78d8e7c054 Update fs2-core, fs2-io to 3.10.1 2024-03-25 06:27:04 +00:00
7f39395c1a Merge pull request #2563 from eikek/update/scribe-3.13.2
Update scribe, scribe-slf4j2 to 3.13.2
2024-03-22 06:32:54 +00:00
faf5caffbe Update scribe, scribe-slf4j2 to 3.13.2 2024-03-22 06:25:14 +00:00
8b06a34fe6 Merge pull request #2562 from eikek/update/calev-core-0.7.2
Update calev-circe, calev-core, calev-fs2 to 0.7.2
2024-03-21 06:33:32 +00:00
aaedb45d96 Update dependency postcss to v8.4.38 2024-03-21 06:25:17 +00:00
753db5f9e4 Update calev-circe, calev-core, calev-fs2 to 0.7.2 2024-03-21 06:25:02 +00:00
2357547e70 Update dependency autoprefixer to v10.4.19 2024-03-21 03:04:59 +00:00
fcb986eca2 Update dependency postcss-import to v16.1.0 2024-03-21 02:31:02 +00:00
bfa5510442 Update dependency cssnano to v6.1.1 2024-03-20 21:29:53 +00:00
c2f8abae94 Merge pull request #2556 from eikek/update/sbt-scalajs-1.16.0
Update sbt-scalajs, scalajs-compiler, ... to 1.16.0
2024-03-20 06:39:19 +00:00
8f9e67d2a6 Merge pull request #2555 from eikek/update/scribe-3.13.1
Update scribe, scribe-slf4j2 to 3.13.1
2024-03-20 06:34:38 +00:00
3ef0a02ab1 Merge pull request #2554 from eikek/update/sbt-buildinfo-0.12.0
Update sbt-buildinfo to 0.12.0
2024-03-20 06:33:48 +00:00
2ece300b81 Update sbt-scalajs, scalajs-compiler, ... to 1.16.0 2024-03-20 06:25:58 +00:00
00ada494c4 Update scribe, scribe-slf4j2 to 3.13.1 2024-03-20 06:25:54 +00:00
e1e1e39606 Update sbt-buildinfo to 0.12.0 2024-03-20 06:25:48 +00:00
cd45407c2d Update dependency postcss to v8.4.37 2024-03-19 22:02:02 +00:00
e474257933 Merge pull request #2552 from eikek/update/swagger-ui-5.12.0
Update swagger-ui to 5.12.0
2024-03-19 06:32:40 +00:00
d1c5a077f1 Merge pull request #2551 from eikek/update/fs2-core-3.10.0
Update fs2-core, fs2-io to 3.10.0
2024-03-19 06:32:35 +00:00
2a4f37cc80 Update swagger-ui to 5.12.0 2024-03-19 06:24:49 +00:00
6db2d25e08 Update fs2-core, fs2-io to 3.10.0 2024-03-19 06:24:44 +00:00
cd9b49e4cc Update dependency postcss to v8.4.36 2024-03-18 01:46:46 +00:00
50ce96b8b2 Merge pull request #2544 from eikek/update/pdfbox-3.0.2
Update pdfbox to 3.0.2
2024-03-15 19:31:16 +00:00
f5514fb707 Merge pull request #2541 from eikek/update/swagger-ui-5.11.10
Update swagger-ui to 5.11.10
2024-03-15 19:31:14 +00:00
1af25ce148 Merge pull request #2546 from eikek/update/postgresql-42.7.3
Update postgresql to 42.7.3
2024-03-15 19:31:02 +00:00
19eef35b98 Merge pull request #2545 from eikek/update/flyway-core-10.10.0
Update flyway-core, ... to 10.10.0
2024-03-15 19:30:32 +00:00
1b7ffd4087 Update swagger-ui to 5.11.10 2024-03-15 20:23:26 +01:00
9253783ef0 Update pdfbox to 3.0.2 2024-03-15 20:23:09 +01:00
7a27fbf8fb Update flyway-core, ... to 10.10.0 2024-03-15 20:22:45 +01:00
f0b0906785 Update postgresql to 42.7.3 2024-03-15 20:22:20 +01:00
93b5a3ee72 Merge pull request #2547 from eikek/elm-deps
Fix renamed elm package
2024-03-15 08:32:23 +00:00
c223ba63aa Fix renamed elm package 2024-03-15 09:23:59 +01:00
f6d22523d1 Convert to stale action 2024-03-11 12:00:08 +01:00
67284d1f6a Fix tailwindcss warnings 2024-03-10 21:24:44 +01:00
247fc1d4e9 Merge pull request #2525 from eikek/update/sbt-github-pages-0.14.0
Update sbt-github-pages to 0.14.0
2024-03-10 20:15:06 +00:00
7c2a57966b Update sbt-github-pages to 0.14.0 2024-03-10 21:06:33 +01:00
fd927fa1e7 Merge pull request #2540 from eikek/redocly-tailwind-setup
Redocly tailwind setup
2024-03-10 20:05:40 +00:00
3d93439b28 Lower memory requirement for test-vm 2024-03-10 20:49:56 +01:00
7c123db1a3 Use tailwindcss standalone cli 2024-03-10 20:13:41 +01:00
7b53f3699f Update redocly setup 2024-03-10 19:53:36 +01:00
5715f60e96 Merge pull request #2539 from eikek/nix-refactor
Extend nix setup, including dev environments
2024-03-10 17:06:08 +00:00
ba8435c7dc Disable strict external link checking
This is so brittle, only works sometimes.
2024-03-10 16:58:22 +01:00
8a41ed3fd3 Github actions use nix 2024-03-10 16:58:22 +01:00
3aad3b7be4 Remove other now obsolete nix files 2024-03-10 15:38:17 +01:00
f3f246d798 Rename server -> restserver in nix setup
While I'd like to rename it the other way around, it would be a much
more breaking change. So for now, this way.
2024-03-10 15:37:16 +01:00
8bcc88ed65 Document flake dev setup 2024-03-10 15:37:16 +01:00
2e18274803 Extend nix flake setup 2024-03-10 15:37:16 +01:00
4167b64e31 Update dependency autoprefixer to v10.4.18 2024-03-09 06:35:51 +00:00
55a2d1359e Merge pull request #2537 from eikek/update/kittens-3.3.0
Update kittens to 3.3.0
2024-03-09 06:20:16 +00:00
442e389537 Update kittens to 3.3.0 2024-03-09 06:12:48 +00:00
2ad9e1fa1e Merge pull request #2511 from eikek/renovate/cssnano-6.x-lockfile
Update dependency cssnano to v6.1.0
2024-03-09 01:30:36 +01:00
f7eb913994 Merge pull request #2534 from eikek/renovate/cachix-install-nix-action-26.x
Update cachix/install-nix-action action to v26
2024-03-09 01:30:22 +01:00
443ba47cfb Update cachix/install-nix-action action to v26 2024-03-08 22:02:57 +00:00
95a28afa69 Update dependency cssnano to v6.1.0 2024-03-08 22:02:53 +00:00
9ef934f8b1 Update dependency @fontsource/montserrat to v5.0.17 2024-03-08 22:01:43 +00:00
ca2a2a32d7 Merge branch 'current-docs' 2024-03-08 21:37:26 +01:00
8269a73a83 Extend config for external commands (#2536)
Allows configuring external commands and providing different arguments
based on runtime values, like language. It extends the current config
of a command to allow an `arg-mappings` section. An example for
ocrmypdf:

```conf
ocrmypdf = {
  enabled = true
  command = {
    program = "ocrmypdf"
### new arg-mappings
    arg-mappings = {
      "mylang" = {
        value = "{{lang}}"
        mappings = [
          {
            matches = "deu"
            args = [ "-l", "deu", "--pdf-renderer", "sandwich" ]
          },
          {
            matches = ".*"
            args = [ "-l", "{{lang}}" ]
          }
        ]
      }
    }
#### end new arg-mappings
    args = [
      ### will be replaced with corresponding args from "mylang" mapping
      "{{mylang}}", 
      "--skip-text",
      "--deskew",
      "-j", "1",
      "{{infile}}",
      "{{outfile}}"
    ]
    timeout = "5 minutes"
  }
  working-dir = ${java.io.tmpdir}"/docspell-convert"
}
```

The whole section will be first processed to replace all `{{…}}`
patterns with corresponding values. Then `arg-mappings` will be looked
at and the first match (value == matches) in its `mappings` array is
used to replace its name in the arguments to the command.
2024-03-08 21:34:42 +01:00
572afd2dc1 Fix array definition in config.toml 2024-03-08 21:24:40 +01:00
9c98f08520 Merge pull request #2533 from eikek/update/flyway-core-10.9.1
Update flyway-core, ... to 10.9.1
2024-03-08 06:18:50 +00:00
c9f2ed7185 Update flyway-core, ... to 10.9.1 2024-03-08 06:11:34 +00:00
012ef62b82 Try give more resources to sbt ci jobs 2024-03-07 21:46:14 +01:00
1691909d8f Try give more resources to ci job 2024-03-07 21:35:42 +01:00
500ae92a09 Merge pull request #2527 from eikek/update/http4s-circe-0.23.26
Update http4s-circe, http4s-dsl, ... to 0.23.26
2024-03-06 06:23:18 +00:00
57ecea818c Merge pull request #2528 from eikek/update/cats-effect-3.5.4
Update cats-effect to 3.5.4
2024-03-06 06:22:19 +00:00
0e2bb198ae Merge pull request #2526 from eikek/update/flyway-core-10.9.0
Update flyway-core, ... to 10.9.0
2024-03-06 06:22:16 +00:00
44bc8ac9ff Update cats-effect to 3.5.4 2024-03-06 06:13:31 +00:00
7411766ff0 Update http4s-circe, http4s-dsl, ... to 0.23.26 2024-03-06 06:13:26 +00:00
3d6643e98f Update flyway-core, ... to 10.9.0 2024-03-06 06:13:22 +00:00
551f96dd21 Merge pull request #2515 from eikek/update/scala-library-2.13.13
Update scala-library to 2.13.13
2024-03-03 17:52:13 +00:00
924aaf720e Fix compile warnings after scala update 2024-03-03 18:43:54 +01:00
1d149119ce Merge branch 'current-docs' 2024-03-03 09:47:40 +01:00
cea7948c2e Remove stackoverflow from external link check
It returns FORBIDDEN, so it cannot be checked, it seems
2024-03-03 09:46:15 +01:00
979bdcfeb1 Merge pull request #2523 from tenpai-git/PostgreSQL-Manual-Backup-Documentation
Pushing a minor fix to the bash commands.
2024-03-03 09:45:22 +01:00
7ea9d2e634 Pushing a minor fix to the bash commands. 2024-03-03 17:25:04 +09:00
0d0b150e0f Merge pull request #2522 from eikek/update/sbt-scalafix-0.12.0
Update sbt-scalafix to 0.12.0
2024-03-03 06:20:34 +00:00
d30cc73e53 Update sbt-scalafix to 0.12.0 2024-03-03 06:11:56 +00:00
7b952f3da6 Merge pull request #2519 from eikek/docker-base-image
Change docker base images to 3.19.1
2024-03-02 07:18:37 +00:00
40f4974aca Merge pull request #2520 from eikek/update/swagger-ui-5.11.8
Update swagger-ui to 5.11.8
2024-03-01 06:20:47 +00:00
4200edf675 Update swagger-ui to 5.11.8 2024-03-01 06:12:14 +00:00
0a987f5b66 Change docker base images to 3.19.1
See #2504, alpine edge introduced a version of tesseract that is
problematic to use from within docspell
2024-02-29 21:52:00 +01:00
3e76385d08 Update scala-library to 2.13.13 2024-02-27 06:11:48 +00:00
0bba5d8e02 Merge branch 'current-docs' 2024-02-26 17:39:33 +01:00
d4eeb01c7c Fix link to modheader plugin 2024-02-26 17:36:53 +01:00
10036cd57b Fix build when bloop plugin is present
When using sbt-bloop, the build doesn't compile anymore. The reason
seems to be incompatible `sbt-io` dependencies pulled in from
`sbt-bloop` and `sbt-native-packager` (as well as `sbt-github-pages`).
Interestingly, the build compiles fine if either one of these plugins
is removed. Only together with `sbt-bloop` does the build fail to compile.
The workaround is to explicitly pull in the io dependency based on the
sbt version in use.
2024-02-26 10:27:56 +01:00
469fd70959 Merge pull request #2508 from tenpai-git/PostgreSQL-Manual-Backup-Documentation
Add documentation for backup and restore process for PostgreSQL.
2024-02-25 09:40:17 +01:00
620d97bd06 Merge pull request #2500 from nekrondev/master
fix(webapp): downstream keep-alive events to backend preventing timeout
2024-02-24 00:56:13 +01:00
1811d6f974 Merge pull request #2493 from eikek/renovate/postgres-16.x
Update postgres Docker tag to v16.2
2024-02-24 00:55:01 +01:00
b193ecc77a Merge pull request #2501 from TheAnachronism/master
Fix some Kubernetes Kustomize deployment issues
2024-02-24 00:54:43 +01:00
bad82d01a5 Update postgres Docker tag to v16.2 2024-02-23 06:21:58 +00:00
0b51337514 Merge pull request #2512 from eikek/update/pureconfig-0.17.6
Update pureconfig, pureconfig-ip4s to 0.17.6
2024-02-23 06:21:03 +00:00
4f24625be9 Merge pull request #2513 from eikek/update/sbt-1.9.9
Update sbt to 1.9.9
2024-02-23 06:20:58 +00:00
21bbe67b09 Update sbt to 1.9.9 2024-02-23 06:12:50 +00:00
c824962925 Update pureconfig, pureconfig-ip4s to 0.17.6 2024-02-23 06:12:46 +00:00
063a702a94 Also including a minor documentation update for JpnVert 2024-02-23 00:21:52 +09:00
3a69bc5ee0 Adds manual backup and restore documentation for PostgreSQL as the recommended database. 2024-02-21 20:24:34 +09:00
7574dc2916 Merge pull request #2506 from eikek/update/mariadb-java-client-3.3.3
Update mariadb-java-client to 3.3.3
2024-02-21 06:19:08 +00:00
dc2937bc64 Merge pull request #2507 from eikek/update/postgresql-42.7.2
Update postgresql to 42.7.2
2024-02-21 06:19:03 +00:00
16db17d35c Update postgresql to 42.7.2 2024-02-21 06:10:53 +00:00
62f3cefc44 Update mariadb-java-client to 3.3.3 2024-02-21 06:10:49 +00:00
e3faff7acf Started adding different assertions and checks of values, so that less invalid configuration is possible. 2024-02-18 16:45:37 +01:00
ad61dae136 eikek/docspell#2502 Added the option supply existing Kubernetes secrets to load secret values. 2024-02-18 15:42:18 +01:00
ba14d88f9f Merge pull request #2496 from eikek/renovate/postcss-import-16.x-lockfile
Update dependency postcss-import to v16.0.1
2024-02-18 15:09:32 +01:00
53160d34bf eikek/docspell#2502 Removed unused db-secret.yaml and moved some configuration templates into the correct directories. 2024-02-17 23:34:50 +01:00
66dad6a300 Merge branch 'master' into helm-chart 2024-02-17 23:17:58 +01:00
8b0dfaedc1 eikek/docspell#2502 Started work on a helm chart for docspell including postgres and solr. 2024-02-17 23:16:44 +01:00
d29d6adbac fix(webapp): downstream keep-alive events to backend preventing timeout
Updated http4s component fixed idleTimeout between backend and proxy / client that requires now to send keep-alive messages from client to backend to prevent a timeout after 60s.

This resolves #2497.
2024-02-17 12:07:26 +01:00
9220c4205d Update dependency postcss-import to v16.0.1 2024-02-16 06:20:07 +00:00
d4c7766f5a Mention minimum MariaDB version 2024-01-31 19:40:02 +01:00
205 changed files with 5029 additions and 4981 deletions

2
.git-blame-ignore-revs Normal file
View File

@ -0,0 +1,2 @@
# Scala Steward: Reformat with scalafmt 3.8.2
1c566cd5182d41f4cc06040fc347ddb4be617779

42
.github/release-drafter.yml vendored Normal file
View File

@ -0,0 +1,42 @@
name-template: "$RESOLVED_VERSION"
tag-template: "$RESOLVED_VERSION"
template: |
## What's Changed
$CHANGES
categories:
- title: "🚀 Features"
labels:
- 'feature'
- 'enhancement'
- title: "🐛 Bug Fixes"
labels:
- 'fix'
- 'bug'
- title: "💚 Maintenance"
labels:
- 'chore'
- 'documentation'
- title: "🧱 Dependencies"
labels:
- 'dependencies'
- 'type: dependencies'
change-template: '- $TITLE @$AUTHOR (#$NUMBER)'
version-resolver:
major:
labels:
- 'breaking'
minor:
labels:
- 'feature'
- 'enhancement'
patch:
labels:
- 'chore'
- 'documentation'
- 'dependencies'
default: patch
exclude-labels:
- 'skip-changelog'

View File

@ -1,6 +1,6 @@
{ {
"automerge": true, "automerge": true,
"labels": ["type: dependencies"], "labels": ["dependencies"],
"packageRules": [ "packageRules": [
{ {
"matchManagers": [ "matchManagers": [

16
.github/stale.yml vendored
View File

@ -1,16 +0,0 @@
# Number of days of inactivity before an issue becomes stale
daysUntilStale: 30
# Number of days of inactivity before a stale issue is closed
daysUntilClose: 7
onlyLabels:
- question
# Label to use when marking an issue as stale
staleLabel: stale
# Comment to post when marking an issue as stale. Set to `false` to disable
markComment: >
This issue has been automatically marked as stale because it has not
had recent activity. It will be closed if no further activity
occurs. This only applies to 'question' issues. Always feel free to
reopen or create new issues. Thank you!
# Comment to post when closing a stale issue. Set to `false` to disable
closeComment: false

View File

@ -6,20 +6,13 @@ on:
- "master" - "master"
jobs: jobs:
check-website: check-website:
runs-on: ubuntu-22.04 runs-on: ubuntu-latest
steps: steps:
- uses: actions/checkout@v4.1.1 - uses: actions/checkout@v4.1.7
with: with:
fetch-depth: 0 fetch-depth: 0
- uses: cachix/install-nix-action@v27
- name: Set current version - name: Set current version
run: echo "DOCSPELL_VERSION=$(cat version.sbt | grep version | cut -d= -f2 | xargs)" >> $GITHUB_ENV run: echo "DOCSPELL_VERSION=$(cat version.sbt | grep version | cut -d= -f2 | xargs)" >> $GITHUB_ENV
- uses: jorelali/setup-elm@v5
with:
elm-version: 0.19.1
- uses: cachix/install-nix-action@v25
with:
nix_path: nixpkgs=channel:nixos-23.05
- name: Print nixpkgs version
run: nix-instantiate --eval -E '(import <nixpkgs> {}).lib.version'
- name: Build website (${{ env.DOCSPELL_VERSION }}) - name: Build website (${{ env.DOCSPELL_VERSION }})
run: nix-shell website/shell.nix --run "sbt make-website" run: nix develop .#ci --command sbt make-website

View File

@ -5,30 +5,18 @@ on:
- master - master
jobs: jobs:
ci-matrix: ci-matrix:
runs-on: ubuntu-22.04 runs-on: ubuntu-latest
strategy: strategy:
fail-fast: false fail-fast: false
matrix:
java: [ 'openjdk@1.17' ]
steps: steps:
- uses: actions/checkout@v4.1.1 - uses: actions/checkout@v4.1.7
with: with:
fetch-depth: 100 fetch-depth: 100
- uses: jorelali/setup-elm@v5
with:
elm-version: 0.19.1
- uses: bahmutov/npm-install@v1
with:
working-directory: modules/webapp
- name: Fetch tags - name: Fetch tags
run: git fetch --depth=100 origin +refs/tags/*:refs/tags/* run: git fetch --depth=100 origin +refs/tags/*:refs/tags/*
- uses: olafurpg/setup-scala@v14 - uses: cachix/install-nix-action@v27
with:
java-version: ${{ matrix.java }}
# - name: Coursier cache
# uses: coursier/cache-action@v6
- name: sbt ci ${{ github.ref }} - name: sbt ci ${{ github.ref }}
run: sbt ci run: nix develop .#ci --command sbt ci
ci: ci:
runs-on: ubuntu-22.04 runs-on: ubuntu-22.04
needs: [ci-matrix] needs: [ci-matrix]

View File

@ -4,9 +4,9 @@ on:
types: [ published ] types: [ published ]
jobs: jobs:
docker-images: docker-images:
runs-on: ubuntu-22.04 runs-on: ubuntu-latest
steps: steps:
- uses: actions/checkout@v4.1.1 - uses: actions/checkout@v4.1.7
with: with:
fetch-depth: 0 fetch-depth: 0
- name: Set current version - name: Set current version

14
.github/workflows/release-drafter.yml vendored Normal file
View File

@ -0,0 +1,14 @@
name: Release Drafter
on:
push:
branches:
- master
jobs:
update_release_draft:
runs-on: ubuntu-latest
steps:
- uses: release-drafter/release-drafter@v6
env:
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}

View File

@ -5,32 +5,20 @@ on:
- "master" - "master"
jobs: jobs:
release-nightly: release-nightly:
runs-on: ubuntu-22.04 runs-on: ubuntu-latest
strategy: strategy:
fail-fast: true fail-fast: true
matrix:
java: [ 'openjdk@1.17' ]
steps: steps:
- uses: actions/checkout@v4.1.1 - uses: actions/checkout@v4.1.7
with: with:
fetch-depth: 0 fetch-depth: 0
- uses: olafurpg/setup-scala@v14 - uses: cachix/install-nix-action@v27
with:
java-version: ${{ matrix.java }}
- uses: jorelali/setup-elm@v5
with:
elm-version: 0.19.1
- uses: bahmutov/npm-install@v1
with:
working-directory: modules/webapp
# - name: Coursier cache
# uses: coursier/cache-action@v6
- name: Set current version - name: Set current version
run: echo "DOCSPELL_VERSION=$(cat version.sbt | grep version | cut -d= -f2 | xargs)" >> $GITHUB_ENV run: echo "DOCSPELL_VERSION=$(cat version.sbt | grep version | cut -d= -f2 | xargs)" >> $GITHUB_ENV
- name: sbt ci ${{ github.ref }} - name: sbt ci ${{ github.ref }}
run: sbt ci run: nix develop .#ci --command sbt ci
- name: sbt make-pkg (${{ env.DOCSPELL_VERSION }}) - name: sbt make-pkg (${{ env.DOCSPELL_VERSION }})
run: sbt make-pkg run: nix develop .#ci --command sbt make-pkg
- uses: "marvinpinto/action-automatic-releases@latest" - uses: "marvinpinto/action-automatic-releases@latest"
with: with:
repo_token: "${{ secrets.GITHUB_TOKEN }}" repo_token: "${{ secrets.GITHUB_TOKEN }}"

View File

@ -5,30 +5,18 @@ on:
- 'v*' - 'v*'
jobs: jobs:
release: release:
runs-on: ubuntu-22.04 runs-on: ubuntu-latest
strategy: strategy:
fail-fast: true fail-fast: true
matrix:
java: [ 'openjdk@1.17' ]
steps: steps:
- uses: actions/checkout@v4.1.1 - uses: actions/checkout@v4.1.7
with: with:
fetch-depth: 0 fetch-depth: 0
- uses: olafurpg/setup-scala@v14 - uses: cachix/install-nix-action@v27
with:
java-version: ${{ matrix.java }}
- uses: jorelali/setup-elm@v5
with:
elm-version: 0.19.1
- uses: bahmutov/npm-install@v1
with:
working-directory: modules/webapp
# - name: Coursier cache
# uses: coursier/cache-action@v6
- name: Set current version - name: Set current version
run: echo "DOCSPELL_VERSION=$(cat version.sbt | grep version | cut -d= -f2 | xargs)" >> $GITHUB_ENV run: echo "DOCSPELL_VERSION=$(cat version.sbt | grep version | cut -d= -f2 | xargs)" >> $GITHUB_ENV
- name: sbt make-pkg (${{ env.DOCSPELL_VERSION }}) - name: sbt make-pkg (${{ env.DOCSPELL_VERSION }})
run: sbt make-pkg run: nix develop .#ci --command sbt make-pkg
- uses: meeDamian/github-release@2.0 - uses: meeDamian/github-release@2.0
with: with:
token: ${{ secrets.GITHUB_TOKEN }} token: ${{ secrets.GITHUB_TOKEN }}

21
.github/workflows/stale.yml vendored Normal file
View File

@ -0,0 +1,21 @@
name: 'Handle stale issues'
on:
schedule:
- cron: '30 1 * * *'
jobs:
stale:
runs-on: ubuntu-latest
steps:
# https://github.com/actions/stale
- uses: actions/stale@v9
with:
days-before-stale: 30
days-before-close: 7
only-labels: question
stale-issue-label: stale
stale-issue-message: >
This issue has been automatically marked as stale because it has not
had recent activity. It will be closed if no further activity
occurs. This only applies to 'question' issues. Always feel free to
reopen or create new issues. Thank you!

View File

@ -5,24 +5,17 @@ on:
- "current-docs" - "current-docs"
jobs: jobs:
publish-website: publish-website:
runs-on: ubuntu-22.04 runs-on: ubuntu-latest
steps: steps:
- uses: actions/checkout@v4.1.1 - uses: actions/checkout@v4.1.7
with: with:
fetch-depth: 0 fetch-depth: 0
- uses: cachix/install-nix-action@v27
- name: Set current version - name: Set current version
run: echo "DOCSPELL_VERSION=$(cat version.sbt | grep version | cut -d= -f2 | xargs)" >> $GITHUB_ENV run: echo "DOCSPELL_VERSION=$(cat version.sbt | grep version | cut -d= -f2 | xargs)" >> $GITHUB_ENV
- uses: jorelali/setup-elm@v5
with:
elm-version: 0.19.1
- uses: cachix/install-nix-action@v25
with:
nix_path: nixpkgs=channel:nixos-23.05
- name: Print nixpkgs version
run: nix-instantiate --eval -E '(import <nixpkgs> {}).lib.version'
- name: Build website (${{ env.DOCSPELL_VERSION }}) - name: Build website (${{ env.DOCSPELL_VERSION }})
run: nix-shell website/shell.nix --run "sbt make-website" run: nix develop .#ci --command sbt make-website
- name: Publish website (${{ env.DOCSPELL_VERSION }}) - name: Publish website (${{ env.DOCSPELL_VERSION }})
env: env:
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
run: sbt publish-website run: nix develop .#ci --command sbt publish-website

1
.gitignore vendored
View File

@ -1,4 +1,5 @@
#artwork/*.png #artwork/*.png
.envrc
target/ target/
local/ local/
node_modules/ node_modules/

View File

@ -6,7 +6,7 @@ pull_request_rules:
assign: assign:
users: [eikek] users: [eikek]
label: label:
add: ["type: dependencies"] add: ["dependencies"]
- name: automatically merge Scala Steward PRs on CI success - name: automatically merge Scala Steward PRs on CI success
conditions: conditions:
- author=eikek-scala-steward[bot] - author=eikek-scala-steward[bot]

View File

@ -1,4 +1,4 @@
version = "3.7.17" version = "3.8.2"
preset = default preset = default
align.preset = some align.preset = some

View File

@ -1020,7 +1020,7 @@ Additionally there are some other minor features and bug fixes.
to be able to add a request header. Check [this for to be able to add a request header. Check [this for
firefox](https://addons.mozilla.org/en-US/firefox/addon/modheader-firefox/) firefox](https://addons.mozilla.org/en-US/firefox/addon/modheader-firefox/)
or [this for or [this for
chromium](https://chrome.google.com/webstore/detail/modheader/idgpnmonknjnojddfkpgkljpfnnfcklj) chromium](https://chromewebstore.google.com/detail/modheader-modify-http-hea/idgpnmonknjnojddfkpgkljpfnnfcklj)
- then add the request header `Docspell-Ui` with value `1`. - then add the request header `Docspell-Ui` with value `1`.
Reloading the page gets you back the old ui. Reloading the page gets you back the old ui.
- With new Web-UI, certain features and fixes were realized, but not - With new Web-UI, certain features and fixes were realized, but not

View File

@ -15,11 +15,14 @@ val scalafixSettings = Seq(
val sharedSettings = Seq( val sharedSettings = Seq(
organization := "com.github.eikek", organization := "com.github.eikek",
scalaVersion := "2.13.12", scalaVersion := "2.13.14",
organizationName := "Eike K. & Contributors", organizationName := "Eike K. & Contributors",
licenses += ("AGPL-3.0-or-later", url( licenses += (
"https://spdx.org/licenses/AGPL-3.0-or-later.html" "AGPL-3.0-or-later",
)), url(
"https://spdx.org/licenses/AGPL-3.0-or-later.html"
)
),
startYear := Some(2020), startYear := Some(2020),
headerLicenseStyle := HeaderLicenseStyle.SpdxSyntax, headerLicenseStyle := HeaderLicenseStyle.SpdxSyntax,
headerSources / excludeFilter := HiddenFileFilter || "*.java" || "StringUtil.scala", headerSources / excludeFilter := HiddenFileFilter || "*.java" || "StringUtil.scala",
@ -677,7 +680,11 @@ val restapi = project
openapiTargetLanguage := Language.Scala, openapiTargetLanguage := Language.Scala,
openapiPackage := Pkg("docspell.restapi.model"), openapiPackage := Pkg("docspell.restapi.model"),
openapiSpec := (Compile / resourceDirectory).value / "docspell-openapi.yml", openapiSpec := (Compile / resourceDirectory).value / "docspell-openapi.yml",
openapiStaticGen := OpenApiDocGenerator.Redoc openapiStaticGen := OpenApiDocGenerator.Redoc,
openapiRedoclyCmd := Seq("redocly-cli"),
openapiRedoclyConfig := Some(
(LocalRootProject / baseDirectory).value / "project" / "redocly.yml"
)
) )
.dependsOn(common, query.jvm, notificationApi, jsonminiq, addonlib) .dependsOn(common, query.jvm, notificationApi, jsonminiq, addonlib)
@ -697,7 +704,11 @@ val joexapi = project
openapiTargetLanguage := Language.Scala, openapiTargetLanguage := Language.Scala,
openapiPackage := Pkg("docspell.joexapi.model"), openapiPackage := Pkg("docspell.joexapi.model"),
openapiSpec := (Compile / resourceDirectory).value / "joex-openapi.yml", openapiSpec := (Compile / resourceDirectory).value / "joex-openapi.yml",
openapiStaticGen := OpenApiDocGenerator.Redoc openapiStaticGen := OpenApiDocGenerator.Redoc,
openapiRedoclyCmd := Seq("redocly-cli"),
openapiRedoclyConfig := Some(
(LocalRootProject / baseDirectory).value / "project" / "redocly.yml"
)
) )
.dependsOn(common, loggingScribe, addonlib) .dependsOn(common, loggingScribe, addonlib)

View File

@ -109,7 +109,7 @@ services:
- restserver - restserver
db: db:
image: postgres:16.1 image: postgres:16.3
container_name: postgres_db container_name: postgres_db
restart: unless-stopped restart: unless-stopped
volumes: volumes:

View File

@ -1,4 +1,4 @@
FROM alpine:20231219 FROM alpine:3.20.2
ARG version= ARG version=
ARG joex_url= ARG joex_url=
@ -77,7 +77,7 @@ RUN \
wget https://github.com/tesseract-ocr/tessdata/raw/main/khm.traineddata && \ wget https://github.com/tesseract-ocr/tessdata/raw/main/khm.traineddata && \
mv khm.traineddata /usr/share/tessdata mv khm.traineddata /usr/share/tessdata
# Using these data files for japanese, because they work better. See #973 # Using these data files for japanese, because they work better. Includes vertical data. See #973 and #2445.
RUN \ RUN \
wget https://raw.githubusercontent.com/tesseract-ocr/tessdata_fast/master/jpn_vert.traineddata && \ wget https://raw.githubusercontent.com/tesseract-ocr/tessdata_fast/master/jpn_vert.traineddata && \
wget https://raw.githubusercontent.com/tesseract-ocr/tessdata_fast/master/jpn.traineddata && \ wget https://raw.githubusercontent.com/tesseract-ocr/tessdata_fast/master/jpn.traineddata && \

View File

@ -1,4 +1,4 @@
FROM alpine:20231219 FROM alpine:3.20.2
ARG version= ARG version=
ARG restserver_url= ARG restserver_url=

130
flake.lock generated Normal file
View File

@ -0,0 +1,130 @@
{
"nodes": {
"devshell-tools": {
"inputs": {
"flake-utils": "flake-utils",
"nixpkgs": "nixpkgs"
},
"locked": {
"lastModified": 1710099997,
"narHash": "sha256-WmBKTLdth6I/D+0//9enbIXohGsBjepbjIAm9pCYj0U=",
"owner": "eikek",
"repo": "devshell-tools",
"rev": "e82faf976d318b3829f6f7f6785db6f3c7b65267",
"type": "github"
},
"original": {
"owner": "eikek",
"repo": "devshell-tools",
"type": "github"
}
},
"flake-utils": {
"inputs": {
"systems": "systems"
},
"locked": {
"lastModified": 1709126324,
"narHash": "sha256-q6EQdSeUZOG26WelxqkmR7kArjgWCdw5sfJVHPH/7j8=",
"owner": "numtide",
"repo": "flake-utils",
"rev": "d465f4819400de7c8d874d50b982301f28a84605",
"type": "github"
},
"original": {
"owner": "numtide",
"repo": "flake-utils",
"type": "github"
}
},
"flake-utils_2": {
"inputs": {
"systems": "systems_2"
},
"locked": {
"lastModified": 1709126324,
"narHash": "sha256-q6EQdSeUZOG26WelxqkmR7kArjgWCdw5sfJVHPH/7j8=",
"owner": "numtide",
"repo": "flake-utils",
"rev": "d465f4819400de7c8d874d50b982301f28a84605",
"type": "github"
},
"original": {
"owner": "numtide",
"repo": "flake-utils",
"type": "github"
}
},
"nixpkgs": {
"locked": {
"lastModified": 1709309926,
"narHash": "sha256-VZFBtXGVD9LWTecGi6eXrE0hJ/mVB3zGUlHImUs2Qak=",
"owner": "NixOS",
"repo": "nixpkgs",
"rev": "79baff8812a0d68e24a836df0a364c678089e2c7",
"type": "github"
},
"original": {
"owner": "NixOS",
"ref": "nixos-23.11",
"repo": "nixpkgs",
"type": "github"
}
},
"nixpkgs_2": {
"locked": {
"lastModified": 1709677081,
"narHash": "sha256-tix36Y7u0rkn6mTm0lA45b45oab2cFLqAzDbJxeXS+c=",
"owner": "NixOS",
"repo": "nixpkgs",
"rev": "880992dcc006a5e00dd0591446fdf723e6a51a64",
"type": "github"
},
"original": {
"owner": "NixOS",
"ref": "nixos-23.11",
"repo": "nixpkgs",
"type": "github"
}
},
"root": {
"inputs": {
"devshell-tools": "devshell-tools",
"flake-utils": "flake-utils_2",
"nixpkgs": "nixpkgs_2"
}
},
"systems": {
"locked": {
"lastModified": 1681028828,
"narHash": "sha256-Vy1rq5AaRuLzOxct8nz4T6wlgyUR7zLU309k9mBC768=",
"owner": "nix-systems",
"repo": "default",
"rev": "da67096a3b9bf56a91d16901293e51ba5b49a27e",
"type": "github"
},
"original": {
"owner": "nix-systems",
"repo": "default",
"type": "github"
}
},
"systems_2": {
"locked": {
"lastModified": 1681028828,
"narHash": "sha256-Vy1rq5AaRuLzOxct8nz4T6wlgyUR7zLU309k9mBC768=",
"owner": "nix-systems",
"repo": "default",
"rev": "da67096a3b9bf56a91d16901293e51ba5b49a27e",
"type": "github"
},
"original": {
"owner": "nix-systems",
"repo": "default",
"type": "github"
}
}
},
"root": "root",
"version": 7
}

193
flake.nix Normal file
View File

@ -0,0 +1,193 @@
{
description = "Docspell";
inputs = {
nixpkgs.url = "github:NixOS/nixpkgs/nixos-23.11";
devshell-tools.url = "github:eikek/devshell-tools";
flake-utils.url = "github:numtide/flake-utils";
};
outputs = {
self,
nixpkgs,
flake-utils,
devshell-tools,
}:
flake-utils.lib.eachDefaultSystem (system: let
pkgs = nixpkgs.legacyPackages.${system};
sbt17 = pkgs.sbt.override {jre = pkgs.jdk17;};
ciPkgs = with pkgs; [
sbt17
jdk17
dpkg
elmPackages.elm
fakeroot
zola
yarn
nodejs
redocly-cli
tailwindcss
];
devshellPkgs =
ciPkgs
++ (with pkgs; [
jq
scala-cli
netcat
wget
which
inotifyTools
]);
docspellPkgs = pkgs.callPackage (import ./nix/pkg.nix) {};
dockerAmd64 = pkgs.pkgsCross.gnu64.callPackage (import ./nix/docker.nix) {
inherit (docspellPkgs) docspell-restserver docspell-joex;
};
dockerArm64 = pkgs.pkgsCross.aarch64-multiplatform.callPackage (import ./nix/docker.nix) {
inherit (docspellPkgs) docspell-restserver docspell-joex;
};
in {
formatter = pkgs.alejandra;
packages = {
inherit (docspellPkgs) docspell-restserver docspell-joex;
};
legacyPackages = {
docker = {
amd64 = {
inherit (dockerAmd64) docspell-restserver docspell-joex;
};
arm64 = {
inherit (dockerArm64) docspell-restserver docspell-joex;
};
};
};
checks = {
build-server = self.packages.${system}.docspell-restserver;
build-joex = self.packages.${system}.docspell-joex;
test = with import (nixpkgs + "/nixos/lib/testing-python.nix")
{
inherit system;
};
makeTest {
name = "docspell";
nodes = {
machine = {...}: {
nixpkgs.overlays = [self.overlays.default];
imports = [
self.nixosModules.default
./nix/checks
];
};
};
testScript = builtins.readFile ./nix/checks/testScript.py;
};
};
devShells = {
dev-cnt = pkgs.mkShellNoCC {
buildInputs =
(builtins.attrValues devshell-tools.legacyPackages.${system}.cnt-scripts)
++ devshellPkgs;
DOCSPELL_ENV = "dev";
DEV_CONTAINER = "docsp-dev";
SBT_OPTS = "-Xmx2G -Xss4m";
};
dev-vm = pkgs.mkShellNoCC {
buildInputs =
(builtins.attrValues devshell-tools.legacyPackages.${system}.vm-scripts)
++ devshellPkgs;
DOCSPELL_ENV = "dev";
SBT_OPTS = "-Xmx2G -Xss4m";
DEV_VM = "dev-vm";
VM_SSH_PORT = "10022";
};
ci = pkgs.mkShellNoCC {
buildInputs = ciPkgs;
SBT_OPTS = "-Xmx2G -Xss4m";
};
};
})
// {
nixosModules = {
default = {...}: {
imports = [
./nix/modules/server.nix
./nix/modules/joex.nix
];
};
server = import ./nix/modules/server.nix;
joex = import ./nix/modules/joex.nix;
};
overlays.default = final: prev: let
docspellPkgs = final.callPackage (import ./nix/pkg.nix) {};
in {
inherit (docspellPkgs) docspell-restserver docspell-joex;
};
nixosConfigurations = {
test-vm = devshell-tools.lib.mkVm {
system = "x86_64-linux";
modules = [
self.nixosModules.default
{
nixpkgs.overlays = [self.overlays.default];
}
./nix/test-vm.nix
];
};
docsp-dev = devshell-tools.lib.mkContainer {
system = "x86_64-linux";
modules = [
{
services.dev-postgres = {
enable = true;
databases = ["docspell"];
};
services.dev-email.enable = true;
services.dev-minio.enable = true;
services.dev-solr = {
enable = true;
cores = ["docspell"];
};
}
];
};
dev-vm = devshell-tools.lib.mkVm {
system = "x86_64-linux";
modules = [
{
networking.hostName = "dev-vm";
virtualisation.memorySize = 2048;
services.dev-postgres = {
enable = true;
databases = ["docspell"];
};
services.dev-email.enable = true;
services.dev-minio.enable = true;
services.dev-solr = {
enable = true;
cores = ["docspell"];
heap = 512;
};
port-forward.ssh = 10022;
port-forward.dev-postgres = 6534;
port-forward.dev-smtp = 10025;
port-forward.dev-imap = 10143;
port-forward.dev-webmail = 8080;
port-forward.dev-minio-api = 9000;
port-forward.dev-minio-console = 9001;
port-forward.dev-solr = 8983;
}
];
};
};
};
}

1
kubernetes/helm/docspell/.gitignore vendored Normal file
View File

@ -0,0 +1 @@
charts/

View File

@ -0,0 +1,23 @@
# Patterns to ignore when building packages.
# This supports shell glob matching, relative path matching, and
# negation (prefixed with !). Only one pattern per line.
.DS_Store
# Common VCS dirs
.git/
.gitignore
.bzr/
.bzrignore
.hg/
.hgignore
.svn/
# Common backup files
*.swp
*.bak
*.tmp
*.orig
*~
# Various IDEs
.project
.idea/
*.tmproj
.vscode/

View File

@ -0,0 +1,6 @@
dependencies:
- name: postgresql
repository: oci://registry-1.docker.io/bitnamicharts
version: 14.0.5
digest: sha256:9a9fa6721983b212cf90cfaff02c7c001423c19d339a1a77ca59f157b1ce3ff5
generated: "2024-02-16T12:52:41.783414782+01:00"

View File

@ -0,0 +1,19 @@
apiVersion: v2
name: docspell
description: Chart for eikek's Docspell document management system
icon: https://raw.githubusercontent.com/eikek/docspell/master/artwork/logo-only.svg
version: 0.0.1
appVersion: v0.41.0
sources:
- "https://github.com/eikek/docspell/"
maintainers:
- name: TheAnachronism
dependencies:
- name: postgresql
repository: oci://registry-1.docker.io/bitnamicharts
version: 14.0.5
condition: postgresql.enabled

View File

@ -0,0 +1,39 @@
{{/*Postgres Access*/}}
{{- define "postgresql.jdbcUrl" -}}
{{- if (index .Values "postgresql").enabled -}}
{{- $port := .Values.postgresql.global.postgresql.service.postgresql | toString -}}
{{- $database := .Values.postgresql.global.postgresql.auth.database -}}
{{- printf "jdbc:postgresql://%s-postgresql:%s/%s" .Release.Name $port $database -}}
{{- end -}}
{{- end -}}
{{/*JDBC Connection*/}}
{{- define "docspell.secrets.JDBC" -}}
{{- if .context.Values.postgresql.enabled -}}
{{- $envPrefix := "DOCSPELL_SERVER_BACKEND_JDBC" -}}
{{- if eq .type "joex" -}}
{{- $envPrefix = "DOCSPELL_JOEX_JDBC" -}}
{{- end }}
{{ $envPrefix }}_USER: {{ .context.Values.postgresql.global.postgresql.auth.username }}
{{- if not .context.Values.postgresql.global.postgresql.auth.existingSecret }}
{{ $envPrefix }}_PASSWORD: {{ .context.Values.postgresql.global.postgresql.auth.password }}
{{- end }}
{{ $envPrefix }}_URL: {{ include "postgresql.jdbcUrl" .context }}
{{- end -}}
{{- end -}}
{{/*Full Text Search ettings*/}}
{{- define "docspell.config.fullTextSearch" -}}
{{- if .context.Values.docspell.fullTextSearch.enabled -}}
{{- $envPrefix := "DOCSPELL_SERVER_FULL__TEXT__SEARCH" -}}
{{- if eq .type "joex" -}}
{{- $envPrefix = "DOCSPELL_JOEX_FULL__TEXT__SEARCH" -}}
{{- end -}}
{{ printf "%s_ENABLED: %s" $envPrefix ( .context.Values.docspell.fullTextSearch.enabled | quote) }}
{{ printf "%s_SOLR_URL: http://%s-solr:%s/solr/docspell" $envPrefix (include "docspell.fullname" .context) ( .context.Values.solr.service.port | toString )}}
{{ printf "%s_SOLR_COMMIT__WITHIN: %s" $envPrefix ( .context.Values.docspell.fullTextSearch.solr.commitWithin | quote) }}
{{ printf "%s_SOLR_LOG__VERBOSE: %s" $envPrefix ( .context.Values.docspell.fullTextSearch.solr.logVerbose | quote ) }}
{{ printf "%s_SOLR_DEF__TYPE: %s" $envPrefix ( .context.Values.docspell.fullTextSearch.solr.defType | quote) }}
{{ printf "%s_SOLR_Q_OP: %s" $envPrefix ( .context.Values.docspell.fullTextSearch.solr.qOp | quote) }}
{{- end }}
{{- end }}

View File

@ -0,0 +1,60 @@
{{/*Expand the name of the chart.*/}}
{{- define "docspell.name" -}}
{{- .Chart.Name | trunc 63 | trimSuffix "-" }}
{{- end }}
{{/*
Create a default fully qualified app name.
We truncate at 63 chars because some Kubernetes name fields are limited to this (by the DNS naming spec).
If release name contains chart name it will be used as a full name.
*/}}
{{- define "docspell.fullname" -}}
{{- $name := .Chart.Name }}
{{- if contains $name .Release.Name }}
{{- .Release.Name | trunc 63 | trimSuffix "-" }}
{{- else }}
{{- printf "%s-%s" .Release.Name $name | trunc 63 | trimSuffix "-" }}
{{- end }}
{{- end }}
{{/*Create chart name and version as used by the chart label.*/}}
{{- define "docspell.chart" -}}
{{- printf "%s-%s" .Chart.Name .Chart.Version | replace "+" "_" | trunc 63 | trimSuffix "-" }}
{{- end }}
{{/*Docker Image Registry Secret Names evaluating values as templates*/}}
{{- define "docspell.images.pullSecrets" -}}
{{- $pullSecrets := .Values.global.imagePullSecrets -}}
{{- range .Values.global.imagePullSecrets -}}
{{- $pullSecrets = append $pullSecrets (dict "name" .) -}}
{{- end -}}
{{- if (not (empty $pullSecrets)) -}}
imagePullSecrets:
{{ toYaml $pullSecrets }}
{{- end -}}
{{- end -}}
{{/*Common labels*/}}
{{- define "docspell.labels" -}}
helm.sh/chart: {{ include "docspell.chart" . }}
{{ include "docspell.selectorLabels" . }}
{{- if .Chart.AppVersion }}
app.kubernetes.io/version: {{ .Chart.AppVersion | quote }}
{{- end }}
app.kubernetes.io/managed-by: {{ .Release.Service }}
{{- end }}
{{/*Selector labels*/}}
{{- define "docspell.selectorLabels" -}}
app.kubernetes.io/name: {{ include "docspell.name" . }}
app.kubernetes.io/instance: {{ .Release.Name }}
{{- end }}
{{/*Create the name of the service account to use*/}}
{{- define "docspell.serviceAccountName" -}}
{{- if .Values.serviceAccount.create }}
{{- default (include "docspell.fullname" .) .Values.serviceAccount.name }}
{{- else }}
{{- default "default" .Values.serviceAccount.name }}
{{- end -}}
{{- end -}}

View File

@ -0,0 +1,43 @@
{{- if .Values.ingress.enabled -}}
{{- $fullname := include "docspell.fullname" . -}}
{{- $context := . -}}
apiVersion: {{ default "networking.k8s.io/v1" .Values.ingress.apiVersion }}
kind: Ingress
metadata:
name: {{ $fullname }}
labels:
{{- include "docspell.labels" . | nindent 4 }}
annotations:
{{- range $key, $value := .Values.ingress.annotations }}
{{ $key }}: {{ $value | quote }}
{{- end }}
spec:
{{- if .Values.ingress.className }}
ingressClassName: {{ .Values.ingress.className }}
{{- end }}
{{- if .Values.ingress.tls }}
tls:
{{- range .Values.ingress.tls }}
- host:
{{- range .hosts }}
- {{ tpl . $ | quote }}
{{- end }}
secretName: {{ .secretName }}
{{- end }}
{{- end }}
rules:
{{- range .Values.ingress.hosts }}
- host: {{ tpl .host $ | quote }}
http:
paths:
{{- range .paths }}
- path: {{ .path }}
pathType: {{ .pathType }}
backend:
service:
name: {{ include "docspell.fullname" $context }}-restserver
port:
name: http
{{- end }}
{{- end }}
{{- end }}

View File

@ -0,0 +1,125 @@
{{/*App ID*/}}
{{- define "docspell.joex.config.appId" -}}
{{- $appId := .Values.docspell.joex.appId | default (printf "%s-joex" (include "docspell.fullname" .)) -}}
{{- print $appId -}}
{{- end -}}
{{/*
Base URL
*/}}
{{- define "docspell.joex.config.baseUrl" -}}
{{- $service := printf "%s-joex" (include "docspell.fullname" .) -}}
{{- $port := .Values.joex.service.port | toString -}}
{{- printf "http://%s:%s" $service $port -}}
{{- end -}}
{{/*Bind Config*/}}
{{- define "docspell.joex.config.bind" -}}
{{- if not (eq .Values.joex.service.port .Values.docspell.joex.bind.port) -}}
{{- fail "Joex and it's service don't have to use the same port, no connection will be possible." -}}
{{- end -}}
{{- $envPrefix := "DOCSPELL_JOEX_BIND" -}}
{{ $envPrefix }}_ADDRESS: {{ .Values.docspell.joex.bind.address | quote }}
{{ $envPrefix }}_PORT: {{ .Values.docspell.joex.bind.port | quote }}
{{- end -}}
{{/*Logging Config*/}}
{{- define "docspell.joex.config.logging" -}}
{{- $envPrefix := "DOCSPELL_JOEX_LOGGING" -}}
{{ $envPrefix }}_FORMAT: {{ .Values.docspell.joex.logging.format }}
{{ $envPrefix }}_MINIMUM__LEVEL: {{ .Values.docspell.joex.logging.minimumLevel }}
{{- end -}}
{{/*JDBC Connection*/}}
{{- define "docspell.joex.config.JDBC" -}}
{{- $envPrefix := "DOCSPELL_JOEX_JDBC" -}}
{{ $envPrefix }}_USER: {{ .Values.postgresql.global.postgresql.auth.username }}
{{ $envPrefix }}_PASSWORD: {{ .Values.postgresql.global.postgresql.auth.password }}
{{ $envPrefix }}_URL: {{ include "postgresql.jdbcUrl" . }}
{{- end -}}
{{/*Database Schema Settings*/}}
{{- define "docspell.joex.config.databaseSchema" -}}
{{- $envPrefix := "DOCSPELL_JOEX_DATABASE__SCHEMA" -}}
{{ $envPrefix }}_RUN__MAIN__MIGRATIONS: {{ .Values.docspell.joex.databaseSchema.runMainMigrations | quote }}
{{ $envPrefix }}_RUN__FIXUP__MIGRATIONS: {{ .Values.docspell.joex.databaseSchema.runFixupMigrations | quote }}
{{ $envPrefix }}_REPAIR__SCHEMA: {{ .Values.docspell.joex.databaseSchema.repairSchema | quote }}
{{- end -}}
{{/*Scheduler Settings*/}}
{{- define "docspell.joex.config.scheduler" -}}
{{- $envPrefix := "DOCSPELL_JOEX_SCHEDULER" -}}
{{ $envPrefix }}_NAME: {{ default (include "docspell.joex.config.appId" .) .Values.docspell.joex.scheduler.name }}
{{ $envPrefix }}_POOL__SIZE: {{ .Values.docspell.joex.scheduler.poolSize | quote }}
{{ $envPrefix }}_COUNTING__SCHEME: {{ .Values.docspell.joex.scheduler.countingScheme | quote }}
{{ $envPrefix }}_RETRIES: {{ .Values.docspell.joex.scheduler.retries | quote }}
{{ $envPrefix }}_RETRY__DELAY: {{ .Values.docspell.joex.scheduler.retryDelay | quote }}
{{ $envPrefix }}_LOG__BUFFER__SIZE: {{ .Values.docspell.joex.scheduler.logBufferSize | quote }}
{{ $envPrefix }}_WAKEUP__PERIOD: {{ .Values.docspell.joex.scheduler.wakeupPeriod | quote }}
{{- end -}}
{{/*PeriodScheduler Settings*/}}
{{- define "docspell.joex.config.periodicScheduler" -}}
{{- $envPrefix := "DOCSPELL_JOEX_PERIODIC__SCHEDULER" -}}
{{ $envPrefix }}_NAME: {{ default (include "docspell.joex.config.appId" .) .Values.docspell.joex.periodicScheduler.name }}
{{ $envPrefix }}_WAKEUP__PERIOD: {{ .Values.docspell.joex.periodicScheduler.wakeupPeriod | quote }}
{{- end -}}
{{/*User Tasks Settings*/}}
{{- define "docspell.joex.config.userTasks" -}}
{{- $envPrefix := "DOCSPELL_JOEX_USER__TASKS_SCAN__MAILBOX" -}}
{{ $envPrefix }}_MAX__FOLDERS: {{ .Values.docspell.joex.userTasks.scanMailbox.maxFolders | quote }}
{{ $envPrefix }}_MAIL__CHUNK__SIZE: {{ .Values.docspell.joex.userTasks.scanMailbox.mailChunkSize | quote }}
{{ $envPrefix }}_MAX__MAILS: {{ .Values.docspell.joex.userTasks.scanMailbox.maxMails | quote }}
{{- end -}}
{{/*House Keeping Settings*/}}
{{- define "docspell.joex.config.houseKeeping" -}}
{{- $envPrefix := "DOCSPELL_JOEX_HOUSE__KEEPING" -}}
{{ $envPrefix }}_SCHEDULE: {{ .Values.docspell.joex.houseKeeping.schedule | quote }}
{{ $envPrefix }}_CLEANUP__INVITES_ENABLED: {{ .Values.docspell.joex.houseKeeping.cleanupInvites.enabled | quote }}
{{ $envPrefix }}_CLEANUP__INVITES_OLDER__THAN: {{ .Values.docspell.joex.houseKeeping.cleanupInvites.olderThan | quote }}
{{ $envPrefix }}_CLEANUP__REMEMBER__ME_ENABLED: {{ .Values.docspell.joex.houseKeeping.cleanupRememberMe.enabled | quote }}
{{ $envPrefix }}_CLEANUP__REMEMBER__ME_OLDER__THAN: {{ .Values.docspell.joex.houseKeeping.cleanupRememberMe.olderThan | quote }}
{{ $envPrefix }}_CLEANUP__JOBS_ENABLED: {{ .Values.docspell.joex.houseKeeping.cleanupJobs.enabled | quote }}
{{ $envPrefix }}_CLEANUP__JOBS_OLDER__THAN: {{ .Values.docspell.joex.houseKeeping.cleanupJobs.olderThan | quote }}
{{ $envPrefix }}_CLEANUP__JOBS_DELETE__BATCH: {{ .Values.docspell.joex.houseKeeping.cleanupJobs.deleteBatch | quote }}
{{ $envPrefix }}_CLEANUP__DOWNLOADS_ENABLED: {{ .Values.docspell.joex.houseKeeping.cleanupDownloads.enabled | quote }}
{{ $envPrefix }}_CLEANUP__DOWNLOADS_OLDER__THAN: {{ .Values.docspell.joex.houseKeeping.cleanupDownloads.olderThan | quote }}
{{ $envPrefix }}_CLEANUP__NODES_ENABLED: {{ .Values.docspell.joex.houseKeeping.cleanupNodes.enabled | quote }}
{{ $envPrefix }}_CLEANUP__NODES_MIN__NOT__FOUND: {{ .Values.docspell.joex.houseKeeping.cleanupNodes.minNotFound |quote }}
{{ $envPrefix }}_INTEGRITY__CHECK_ENABLED: {{ .Values.docspell.joex.houseKeeping.integrityCheck.enabled | quote }}
{{- end -}}
{{/*Update Check Settings*/}}
{{- define "docspell.joex.config.updateCheck" -}}
{{- if and .Values.docspell.joex.updateCheck.enabled (not .Values.docspell.joex.updateCheck.recipients) -}}
{{- fail "Update check recipients have to be set when enabling update check" -}}
{{- end -}}
{{- $envPrefix := "DOCSPELL_JOEX_UPDATE__CHECK" -}}
{{ $envPrefix }}_ENABLED: {{ .Values.docspell.joex.updateCheck.enabled | quote }}
{{ $envPrefix }}_TEST__RUN: {{ .Values.docspell.joex.updateCheck.testRun | quote }}
{{ $envPrefix }}_SCHEDULE: {{ .Values.docspell.joex.updateCheck.schedule | quote }}
{{- if .Values.docspell.joex.updateCheck.senderAccount }}
{{ $envPrefix }}_SENDER__ACOUNT: {{ .Values.docspell.joex.updateCheck.senderAccount }}
{{ $envPrefix }}_SMTP__ID: {{ .Values.docspell.joex.updateCheck.smtpId }}
{{- end }}
{{- range $index, $recipient := .Values.docspell.joex.updateCheck.recipients }}
{{ $envPrefix }}_RECIPIENTS_{{ $index }}: {{ $recipient }}
{{- end }}
{{ $envPrefix }}_SUBJECT: {{ .Values.docspell.joex.updateCheck.subject }}
{{ $envPrefix }}_BODY: | {{ .Values.docspell.joex.updateCheck.body | nindent 4 }}
{{- end -}}
{{/*Convert Settings*/}}
{{- define "docspell.joex.config.convert" -}}
{{- $envPrefix := "DOCSPELL_JOEX_CONVERT" -}}
{{ $envPrefix }}_HTML__CONVERTER: {{ .Values.docspell.joex.convert.htmlConverter }}
{{- end -}}
{{/*Full Text Search Settings*/}}
{{- define "docspell.joex.config.fullTextSearch" -}}
{{- if .Values.docspell.fullTextSearch.enabled -}}
DOCSPELL_JOEX_FULL__TEXT__SEARCH_MIGRATION_INDEX__ALL__CHUNK: {{ .Values.docspell.joex.fullTextSearch.migration.indexAllChink | quote }}
{{- end -}}
{{- end -}}

View File

@ -0,0 +1,10 @@
{{- define "docspell.joex.secrets.existingSecrets" -}}
{{/*PostgreSQL Password*/}}
{{- if .Values.postgresql.global.postgresql.auth.existingSecret -}}
- name: DOCSPELL_JOEX_JDBC_PASSWORD
valueFrom:
secretKeyRef:
name: {{ .Values.postgresql.global.postgresql.auth.existingSecret }}
key: {{ .Values.postgresql.global.postgresql.auth.secretKeys.userPasswordKey | default "password" }}
{{- end -}}
{{- end -}}

View File

@ -0,0 +1,29 @@
{{/*Common labels*/}}
{{- define "joex.labels" -}}
helm.sh/chart: {{ include "docspell.chart" . }}
app: {{ include "docspell.name" . }}-joex
{{ include "joex.selectorLabels" . }}
app.kubernetes.io/version: {{ .Values.joex.image.tag | default .Chart.AppVersion | quote }}
version: {{ .Values.joex.image.tag | default .Chart.AppVersion | quote }}
app.kubernetes.io/managed-by: {{ .Release.Service }}
{{- end }}
{{/*Selector labels*/}}
{{- define "joex.selectorLabels" -}}
app.kubernetes.io/name: {{ include "docspell.name" . }}-joex
app.kubernetes.io/instance: {{ .Release.Name }}
{{- end }}
{{/*Create joex image name and tag used by the deployment*/}}
{{- define "joex.image" -}}
{{- $registry := .Values.global.imageRegistry | default .Values.joex.image.registry -}}
{{- $repository := .Values.joex.image.repository -}}
{{- $separator := ":" -}}
{{- $tag := .Values.joex.image.tag | default .Chart.AppVersion -}}
{{- if $registry -}}
{{- printf "%s/%s%s%s" $registry $repository $separator $tag -}}
{{- else -}}
{{- printf "%s%s%s" $repository $separator $tag -}}
{{- end -}}
{{- end -}}

View File

@ -0,0 +1,23 @@
{{- /* ConfigMap with the non-secret environment of the joex (job executor) container; the joex Deployment consumes it wholesale via envFrom. */ -}}
kind: ConfigMap
apiVersion: v1
metadata:
name: {{ include "docspell.fullname" . }}-joex
labels:
{{- include "joex.labels" . | nindent 4 }}
data:
DOCSPELL_JOEX_APP__ID: {{ include "docspell.joex.config.appId" . }}
DOCSPELL_JOEX_BASE__URL: {{ include "docspell.joex.config.baseUrl" . }}
{{- include "docspell.joex.config.bind" . | nindent 4 }}
{{- include "docspell.joex.config.logging" . | nindent 4 }}
DOCSPELL_JOEX_MAIL__DEBUG: {{ .Values.docspell.joex.mailDebug | quote }}
{{- include "docspell.joex.config.databaseSchema" . | nindent 4 }}
{{- include "docspell.joex.config.scheduler" . | nindent 4 }}
{{- include "docspell.joex.config.periodicScheduler" . | nindent 4 }}
{{- include "docspell.joex.config.userTasks" . | nindent 4 }}
{{- include "docspell.joex.config.houseKeeping" . | nindent 4 }}
{{- include "docspell.joex.config.updateCheck" . | nindent 4 }}
{{- include "docspell.joex.config.convert" . | nindent 4 }}
{{- if .Values.docspell.fullTextSearch.enabled -}}
{{- include "docspell.config.fullTextSearch" (dict "context" . "type" "joex") | nindent 4 }}
{{- include "docspell.joex.config.fullTextSearch" . | nindent 4}}
{{- end }}

View File

@ -0,0 +1,63 @@
{{- /* Deployment running the docspell joex (job executor). Configuration and secrets are injected via envFrom from the sibling ConfigMap and Secret; existing external Secrets are wired into `env:` individually. */ -}}
apiVersion: apps/v1
kind: Deployment
metadata:
name: {{ include "docspell.fullname" . }}-joex
annotations:
{{- if .Values.joex.deployment.annotations }}
{{- toYaml .Values.joex.deployment.annotations | nindent 4 }}
{{- end }}
labels:
{{- include "joex.labels" . | nindent 4 }}
spec:
replicas: {{ .Values.joex.replicaCount }}
strategy:
type: {{ .Values.joex.strategy.type }}
{{- if eq .Values.joex.strategy.type "RollingUpdate" }}
rollingUpdate:
maxUnavailable: {{ .Values.joex.strategy.rollingUpdate.maxUnavailable }}
maxSurge: {{ .Values.joex.strategy.rollingUpdate.maxSurge }}
{{- end }}
selector:
matchLabels:
{{- include "joex.selectorLabels" . | nindent 6 }}
{{- if .Values.joex.deployment.labels }}
{{- toYaml .Values.joex.deployment.labels | nindent 6 }}
{{- end }}
template:
metadata:
annotations:
{{- /* Checksums of the rendered config/secret force a rolling restart of the pods whenever either changes. */}}
checksum/config: {{ include (print $.Template.BasePath "/joex/config.yaml") . | sha256sum }}
checksum/secret: {{ include (print $.Template.BasePath "/joex/secret.yaml") . | sha256sum }}
{{- with .Values.joex.podAnnotations }}
{{- toYaml . | nindent 8 }}
{{- end }}
labels:
{{- include "joex.labels" . | nindent 8 }}
{{- if .Values.joex.deployment.labels }}
{{- toYaml .Values.joex.deployment.labels | nindent 8 }}
{{- end }}
spec:
{{- if or .Values.serviceAccount.create .Values.serviceAccount.name }}
serviceAccountName: {{ include "docspell.serviceAccountName" . }}
{{- end }}
terminationGracePeriodSeconds: {{ .Values.joex.deployment.terminationGracePeriodSeconds }}
containers:
- name: joex
image: "{{ include "joex.image" . }}"
imagePullPolicy: {{ .Values.joex.image.pullPolicy }}
{{- with .Values.joex.additionalArgs }}
args:
{{- toYaml . | nindent 10 }}
{{- end }}
ports:
- containerPort: {{ .Values.joex.service.port }}
name: http
env:
{{- /* Individual env entries referencing pre-existing Secrets; bulk config/secrets come from envFrom below. */}}
{{- include "docspell.joex.secrets.existingSecrets" . | nindent 10 }}
envFrom:
- configMapRef:
name: {{ include "docspell.fullname" . }}-joex
- secretRef:
name: {{ include "docspell.fullname" . }}-joex-secret
resources:
{{- toYaml .Values.joex.resources | nindent 12 }}

View File

@ -0,0 +1,9 @@
{{- /* Secret with the joex sensitive env values (JDBC credentials via the shared docspell.secrets.JDBC helper); the joex Deployment consumes it via envFrom. */ -}}
apiVersion: v1
kind: Secret
metadata:
name: {{ include "docspell.fullname" . }}-joex-secret
labels:
{{- include "joex.labels" . | nindent 4}}
type: Opaque
stringData:
{{- include "docspell.secrets.JDBC" (dict "context" . "type" "joex") | nindent 4 -}}

View File

@ -0,0 +1,20 @@
{{- /* Service exposing the joex HTTP port inside the cluster.
Fix: `.Values.joex.serivce.labels` was a typo — the guard checked
`.Values.joex.service.labels` but rendered the misspelled path, so any
user-supplied service labels were silently dropped. */ -}}
apiVersion: v1
kind: Service
metadata:
  name: {{ include "docspell.fullname" . }}-joex
  labels:
    {{- include "joex.labels" . | nindent 4 }}
    {{- if .Values.joex.service.labels }}
    {{- toYaml .Values.joex.service.labels | nindent 4 }}
    {{- end }}
  annotations:
    {{- toYaml .Values.joex.service.annotations | nindent 4 }}
spec:
  type: {{ .Values.joex.service.type }}
  ports:
    - port: {{ .Values.joex.service.port }}
      targetPort: {{ .Values.joex.service.targetPort | default .Values.joex.service.port }}
      protocol: TCP
      name: http
  selector:
    {{- include "joex.selectorLabels" . | nindent 6 -}}

View File

@ -0,0 +1,174 @@
{{/*App ID*/}}
{{- define "docspell.server.config.appId" -}}
{{- /* Node id of the restserver; defaults to "<fullname>-restserver" when not configured. */ -}}
{{- .Values.docspell.server.appId | default (printf "%s-restserver" (include "docspell.fullname" .)) -}}
{{- end -}}
{{/*Logging Config — log output format and minimum level for the restserver*/}}
{{- define "docspell.server.config.logging" -}}
{{- $envPrefix := "DOCSPELL_SERVER_LOGGING" -}}
{{ $envPrefix }}_FORMAT: {{ .Values.docspell.server.logging.format }}
{{ $envPrefix }}_MINIMUM__LEVEL: {{ .Values.docspell.server.logging.minimumLevel }}
{{- end -}}
{{/*Bind Config*/}}
{{- define "docspell.server.config.bind" -}}
{{- /* Guard: the container bind port and the Service port must be equal, otherwise the
Service cannot reach the server. Fix: the failure message previously said the
opposite ("don't have to use the same port"). */ -}}
{{- if not (eq .Values.restserver.service.port .Values.docspell.server.bind.port) -}}
{{- fail "The restserver and its service have to use the same port, otherwise no connection will be possible." -}}
{{- end -}}
{{- $envPrefix := "DOCSPELL_SERVER_BIND" -}}
{{ $envPrefix }}_ADDRESS: {{ .Values.docspell.server.bind.address | quote }}
{{ $envPrefix }}_PORT: {{ .Values.docspell.server.bind.port | quote }}
{{- end -}}
{{/*Auth Config — session validity, remember-me settings and account-source conflict handling*/}}
{{- define "docspell.server.config.auth" -}}
{{- $envPrefix := "DOCSPELL_SERVER_AUTH" -}}
{{ $envPrefix }}_SESSION__VALID: {{ .Values.docspell.server.auth.sessionValid | quote }}
{{ $envPrefix }}_REMEMBER__ME_ENABLED: {{ .Values.docspell.server.auth.rememberMe.enabled | quote }}
{{ $envPrefix }}_REMEMBER__ME_VALID: {{ .Values.docspell.server.auth.rememberMe.valid | quote }}
{{ $envPrefix }}_ON__ACCOUNT__SOURCE__CONFLICT: {{ .Values.docspell.server.auth.onAccountSourceConflict }}
{{- end -}}
{{/*Auth Secrets — emits the fixed server secret if one is given; an existingSecret is wired in via env entries instead (see docspell.server.secrets.existingSecrets)*/}}
{{- define "docspell.server.secrets.auth" -}}
{{- if .Values.docspell.server.auth.serverSecret -}}
{{- if and .Values.docspell.server.auth.serverSecret.value .Values.docspell.server.auth.serverSecret.existingSecret -}}
{{- fail "Only either a fixed server secret or an existing secret should be specified" -}}
{{- end -}}
{{- with .Values.docspell.server.auth.serverSecret.value }}
DOCSPELL_SERVER_AUTH_SERVER__SECRET: {{ . }}
{{- end -}}
{{- end -}}
{{- end -}}
{{/*Download Config*/}}
{{- define "docspell.server.config.donwload" -}}
{{- /* NOTE(review): "donwload"/"donwloadAll" are typos for "download". The template name is referenced with the same spelling by the restserver ConfigMap and the keys presumably match values.yaml, so renaming requires updating all of them together. */ -}}
{{- $envPrefix := "DOCSPELL_SERVER_DOWNLOAD__ALL" -}}
{{ $envPrefix }}_MAX__FILES: {{ .Values.docspell.server.donwloadAll.maxFiles | quote }}
{{ $envPrefix }}_MAX__SIZE: {{ .Values.docspell.server.donwloadAll.maxSize }}
{{- end -}}
{{/*OpenID Config*/}}
{{- define "docspell.server.config.openid" -}}
{{- /* One block of env vars per enabled entry in .Values.docspell.server.openid.
Fix: the provider prefix is now a loop-local variable; previously $envPrefix
itself was re-assigned with `=`, so with more than one enabled provider every
later iteration inherited the already-extended prefix of the previous one
(e.g. DOCSPELL_SERVER_OPENID_0_PROVIDER_1_PROVIDER_...). */ -}}
{{- $envPrefix := "DOCSPELL_SERVER_OPENID" -}}
{{- range $index, $entry := .Values.docspell.server.openid -}}
{{- if $entry.enabled -}}
{{ $envPrefix }}_{{ $index }}_DISPLAY: {{ $entry.display }}
{{ $envPrefix }}_{{ $index }}_ENABLED: {{ $entry.enabled | quote }}
{{ $envPrefix }}_{{ $index }}_COLLECTIVE__KEY: {{ $entry.collectiveKey }}
{{ $envPrefix }}_{{ $index }}_USER__KEY: {{ $entry.userKey }}
{{- $providerPrefix := printf "%s_%s_PROVIDER" $envPrefix ($index | toString) }}
{{ $providerPrefix }}_PROVIDER__ID: {{ $entry.provider.providerId }}
{{ $providerPrefix }}_SCOPE: {{ $entry.provider.scope }}
{{ $providerPrefix }}_AUTHORIZE__URL: {{ $entry.provider.authorizeUrl }}
{{ $providerPrefix }}_TOKEN__URL: {{ $entry.provider.tokenUrl }}
{{- with $entry.provider.userUrl }}
{{ $providerPrefix }}_USER__URL: {{ . }}
{{- end }}
{{ $providerPrefix }}_LOGOUT__URL: {{ $entry.provider.logoutUrl }}
{{ $providerPrefix }}_SIG__ALGO: {{ $entry.provider.sigAlgo }}
{{- end -}}
{{- end -}}
{{- end -}}
{{/*OpenID Secrets*/}}
{{- define "docspell.server.secrets.openid" -}}
{{- /* Fixed client credentials per enabled provider that does not use an existingSecret.
Fix: use a loop-local `:=` variable for the provider prefix; the previous `=`
re-assignment mutated the shared $envPrefix and corrupted the prefix for every
provider after the first. */ -}}
{{- $envPrefix := "DOCSPELL_SERVER_OPENID" -}}
{{- range $index, $entry := .Values.docspell.server.openid -}}
{{- if and $entry.enabled (not $entry.provider.existingSecret) -}}
{{- $providerPrefix := printf "%s_%s_PROVIDER" $envPrefix ($index | toString) }}
{{ $providerPrefix }}_CLIENT__ID: {{ $entry.provider.clientId }}
{{ $providerPrefix }}_CLIENT__SECRET: {{ $entry.provider.clientSecret }}
{{ $providerPrefix }}_SIGN__KEY: {{ $entry.provider.signKey }}
{{- end -}}
{{- end -}}
{{- end -}}
{{/*Database Schema Settings — migration and schema-repair flags for the restserver backend*/}}
{{- define "docspell.server.config.databaseSchema" -}}
{{- $envPrefix := "DOCSPELL_SERVER_BACKEND_DATABASE__SCHEMA" -}}
{{ $envPrefix }}_RUN__MAIN__MIGRATIONS: {{ .Values.docspell.server.backend.databaseSchema.runMainMigrations | quote }}
{{ $envPrefix }}_RUN__FIXUP__MIGRATIONS: {{ .Values.docspell.server.backend.databaseSchema.runFixupMigrations | quote }}
{{ $envPrefix }}_REPAIR__SCHEMA: {{ .Values.docspell.server.backend.databaseSchema.repairSchema | quote }}
{{- end -}}
{{/*Integration Endpoint Settings*/}}
{{- define "docspell.server.config.integrationEndpoint" -}}
{{- /* Fix: the httpBasic/httpHeader conditions piped the boolean through `quote`,
yielding the non-empty string "false" which is always truthy in `if` — the
env vars were emitted even when the feature was disabled. The `quote` now
only applies to the emitted value (ConfigMap values must be strings), which
also fixes ALLOWED__IPS_ENABLED being rendered as a bare YAML boolean. */ -}}
{{- $envPrefix := "DOCSPELL_SERVER_INTEGRATION__ENDPOINT" -}}
{{ $envPrefix }}_ENABLED: {{ .Values.docspell.server.integrationEndpoint.enabled | quote }}
{{ $envPrefix }}_PRIORITY: {{ .Values.docspell.server.integrationEndpoint.priority }}
{{ $envPrefix }}_SOURCE__NAME: {{ .Values.docspell.server.integrationEndpoint.sourceName }}
{{- /* NOTE(review): "enabed" looks like a typo for "enabled" — keep in sync with the key used in values.yaml before renaming. */ -}}
{{- if .Values.docspell.server.integrationEndpoint.allowedIps.enabed }}
{{ $envPrefix }}_ALLOWED__IPS_ENABLED: {{ .Values.docspell.server.integrationEndpoint.allowedIps.enabed | quote }}
{{- range $index, $ip := .Values.docspell.server.integrationEndpoint.allowedIps.ips }}
{{ $envPrefix }}_ALLOWED__IPS_IPS_{{ $index }}: {{ $ip }}
{{- end }}
{{- end }}
{{- if .Values.docspell.server.integrationEndpoint.httpBasic.enabled }}
{{ $envPrefix }}_HTTP__BASIC_ENABLED: {{ .Values.docspell.server.integrationEndpoint.httpBasic.enabled | quote }}
{{- end }}
{{- if .Values.docspell.server.integrationEndpoint.httpHeader.enabled }}
{{ $envPrefix }}_HTTP__HEADER_ENABLED: {{ .Values.docspell.server.integrationEndpoint.httpHeader.enabled | quote }}
{{- end }}
{{- end }}
{{/*Integration Endpoint Secrets*/}}
{{- define "docspell.server.secrets.integrationEndpoint" -}}
{{- /* Fixed credentials / header value for the integration endpoint (only when no
existingSecret is used — combining both fails the render).
Fixes: inside `with .Values...headerValue.value` the body referenced
`.Values...` again, but `with` rebinds the dot to the string value, so the
render errored whenever a fixed header value was set — it now emits `{{ . }}`.
The `with ... -}}` right-trim also glued the HEADER__VALUE line onto the
previous one, and the untrimmed $envPrefix assignment emitted a stray line.
Also fixed "ingration" in the failure message. */ -}}
{{- if .Values.docspell.server.integrationEndpoint.httpBasic.enabled -}}
{{- if and .Values.docspell.server.integrationEndpoint.httpBasic.credentials .Values.docspell.server.integrationEndpoint.httpBasic.existingSecret -}}
{{- fail "Only either the fixed credentials or an existing secret for the httpBasic integration endpoint should be set" -}}
{{- end -}}
{{- $envPrefix := "DOCSPELL_SERVER_INTEGRATION__ENDPOINT_HTTP__BASIC" -}}
{{ $envPrefix }}_REALM: {{ .Values.docspell.server.integrationEndpoint.httpBasic.realm }}
{{- with .Values.docspell.server.integrationEndpoint.httpBasic.credentials }}
{{ $envPrefix }}_USER: {{ .username }}
{{ $envPrefix }}_PASSWORD: {{ .password }}
{{- end -}}
{{- end }}
{{- if .Values.docspell.server.integrationEndpoint.httpHeader.enabled -}}
{{- if and .Values.docspell.server.integrationEndpoint.httpHeader.headerValue.value .Values.docspell.server.integrationEndpoint.httpHeader.headerValue.existingSecret -}}
{{- fail "Only either the fixed header value or an existing secret for the http header integration endpoint should be set" -}}
{{- end -}}
{{- $envPrefix := "DOCSPELL_SERVER_INTEGRATION__ENDPOINT_HTTP__HEADER" -}}
{{ $envPrefix }}_HEADER__NAME: {{ .Values.docspell.server.integrationEndpoint.httpHeader.headerName }}
{{- with .Values.docspell.server.integrationEndpoint.httpHeader.headerValue.value }}
{{ $envPrefix }}_HEADER__VALUE: {{ . }}
{{- end -}}
{{- end }}
{{- end -}}
{{/*Admin Endpoint Secrets — emits the fixed admin secret; fails when a fixed value and an existingSecret are configured at the same time*/}}
{{- define "docspell.server.secrets.adminEndpoint" -}}
{{- if .Values.docspell.server.adminEndpoint.enabled -}}
{{- $context := . -}}
{{- with .Values.docspell.server.adminEndpoint.secret -}}
{{- if $context.Values.docspell.server.adminEndpoint.existingSecret }}
{{- fail "Only either the fixed value or an existing secret for the admin endpoint should be set" -}}
{{- end -}}
DOCSPELL_SERVER_ADMIN__ENDPOINT_SECRET: {{ .value }}
{{- end -}}
{{- end -}}
{{- end -}}
{{/*Signup Settings — signup mode; the invite lifetime only applies to mode "invite"*/}}
{{- define "docspell.server.config.signup" -}}
{{- $envPrefix := "DOCSPELL_SERVER_BACKEND_SIGNUP" -}}
{{ $envPrefix }}_MODE: {{ .Values.docspell.server.backend.signup.mode }}
{{- if eq .Values.docspell.server.backend.signup.mode "invite" }}
{{ $envPrefix }}_INVITE__TIME: {{ .Values.docspell.server.backend.signup.inviteTime }}
{{- end -}}
{{- end -}}
{{/*Signup Secrets — fixed new-invite password, only for mode "invite"; fails when an existingSecret is configured at the same time*/}}
{{- define "docspell.server.secrets.signup" -}}
{{- if eq .Values.docspell.server.backend.signup.mode "invite" }}
{{- $context := . -}}
{{- with .Values.docspell.server.backend.signup.newInvitePassword.value -}}
{{- if $context.Values.docspell.server.backend.signup.newInvitePassword.existingSecret -}}
{{- fail "Only either the fixed value or an existing secret for the new invite password should be set" -}}
{{- end -}}
DOCSPELL_SERVER_BACKEND_SIGNUP_NEW__INVITE__PASSWORD: {{ . }}
{{- end -}}
{{- end -}}
{{- end -}}

View File

@ -0,0 +1,86 @@
{{- /* Env entries for the restserver container that pull secret values from
pre-existing Kubernetes Secrets; rendered under the container's `env:` list.
Fix: the `if/else` around the SIGN__KEY value used `-}}` right-trims, which
chomped the newline and glued `valueFrom:` (resp. `value: ""`) onto the
`- name:` line, producing invalid YAML whenever an OIDC existingSecret was
configured. */ -}}
{{- define "docspell.server.secrets.existingSecrets" -}}
{{/*Server Secret*/}}
{{- if .Values.docspell.server.auth.serverSecret -}}
{{- if and .Values.docspell.server.auth.serverSecret.existingSecret (not .Values.docspell.server.auth.serverSecret.value) -}}
- name: DOCSPELL_SERVER_AUTH_SERVER__SECRET
  valueFrom:
    secretKeyRef:
      name: {{ .Values.docspell.server.auth.serverSecret.existingSecret.name }}
      key: {{ .Values.docspell.server.auth.serverSecret.existingSecret.key }}
{{- end -}}
{{- end }}
{{/*OIDC Secrets*/}}
{{- range $index, $entry := .Values.docspell.server.openid -}}
{{- if and $entry.enabled $entry.provider.existingSecret -}}
{{- $envPrefix := printf "%s_%s_PROVIDER" "DOCSPELL_SERVER_OPENID" ($index | toString) -}}
- name: {{ $envPrefix }}_CLIENT__ID
  valueFrom:
    secretKeyRef:
      name: {{ $entry.provider.existingSecret.name }}
      key: {{ $entry.provider.existingSecret.clientIdKey }}
- name: {{ $envPrefix }}_CLIENT__SECRET
  valueFrom:
    secretKeyRef:
      name: {{ $entry.provider.existingSecret.name }}
      key: {{ $entry.provider.existingSecret.clientSecretKey }}
- name: {{ $envPrefix }}_SIGN__KEY
{{- if $entry.provider.existingSecret.signKeyKey }}
  valueFrom:
    secretKeyRef:
      name: {{ $entry.provider.existingSecret.name }}
      key: {{ $entry.provider.existingSecret.signKeyKey }}
{{- else }}
  value: ""
{{- end -}}
{{- end -}}
{{- end -}}
{{/*Integration Endpoint Http Basic Auth*/}}
{{- if .Values.docspell.server.integrationEndpoint.httpBasic.existingSecret }}
- name: DOCSPELL_SERVER_INTEGRATION__ENDPOINT_HTTP__BASIC_USER
  valueFrom:
    secretKeyRef:
      name: {{ .Values.docspell.server.integrationEndpoint.httpBasic.existingSecret.name }}
      key: {{ .Values.docspell.server.integrationEndpoint.httpBasic.existingSecret.usernameKey }}
- name: DOCSPELL_SERVER_INTEGRATION__ENDPOINT_HTTP__BASIC_PASSWORD
  valueFrom:
    secretKeyRef:
      name: {{ .Values.docspell.server.integrationEndpoint.httpBasic.existingSecret.name }}
      key: {{ .Values.docspell.server.integrationEndpoint.httpBasic.existingSecret.passwordKey }}
{{- end }}
{{/*Integration Endpoint Http Header Auth*/}}
{{- if and .Values.docspell.server.integrationEndpoint.enabled .Values.docspell.server.integrationEndpoint.httpHeader.enabled -}}
{{- if .Values.docspell.server.integrationEndpoint.httpHeader.headerValue.existingSecret }}
- name: DOCSPELL_SERVER_INTEGRATION__ENDPOINT_HTTP__HEADER_HEADER__VALUE
  valueFrom:
    secretKeyRef:
      name: {{ .Values.docspell.server.integrationEndpoint.httpHeader.headerValue.existingSecret.name }}
      key: {{ .Values.docspell.server.integrationEndpoint.httpHeader.headerValue.existingSecret.key }}
{{- end -}}
{{- end }}
{{/*Admin Endpoint Secret*/}}
{{- with .Values.docspell.server.adminEndpoint.existingSecret }}
- name: DOCSPELL_SERVER_ADMIN__ENDPOINT_SECRET
  valueFrom:
    secretKeyRef:
      name: {{ .name }}
      key: {{ .key }}
{{- end }}
{{/*Sign Up Invitation Generation Password*/}}
{{- if eq .Values.docspell.server.backend.signup.mode "invite" -}}
{{- with .Values.docspell.server.backend.signup.newInvitePassword.existingSecret }}
- name: DOCSPELL_SERVER_BACKEND_SIGNUP_NEW__INVITE__PASSWORD
  valueFrom:
    secretKeyRef:
      name: {{ .name }}
      key: {{ .key }}
{{- end -}}
{{- end }}
{{/*PostgreSQL Password*/}}
{{- if .Values.postgresql.global.postgresql.auth.existingSecret -}}
- name: DOCSPELL_SERVER_BACKEND_JDBC_PASSWORD
  valueFrom:
    secretKeyRef:
      name: {{ .Values.postgresql.global.postgresql.auth.existingSecret }}
      key: {{ .Values.postgresql.global.postgresql.auth.secretKeys.userPasswordKey | default "password" }}
{{- end -}}
{{- end -}}

View File

@ -0,0 +1,28 @@
{{/*Common labels applied to every restserver resource: chart/app/version/managed-by plus the selector labels*/}}
{{- define "restserver.labels" -}}
helm.sh/chart: {{ include "docspell.chart" . }}
app: {{ include "docspell.name" . }}-restserver
{{ include "restserver.selectorLabels" . }}
app.kubernetes.io/version: {{ .Values.restserver.image.tag | default .Chart.AppVersion | quote }}
version: {{ .Values.restserver.image.tag | default .Chart.AppVersion | quote }}
app.kubernetes.io/managed-by: {{ .Release.Service }}
{{- end }}
{{/*Selector labels — the immutable identity labels used by the restserver Deployment selector and Service*/}}
{{- define "restserver.selectorLabels" -}}
app.kubernetes.io/name: {{ include "docspell.name" . }}-restserver
app.kubernetes.io/instance: {{ .Release.Name }}
{{- end }}
{{/*Create restserver image name and tag used by the deployment*/}}
{{- define "restserver.image" -}}
{{- /* global.imageRegistry overrides the per-component registry; the tag falls back to the chart's appVersion. */ -}}
{{- $tag := .Values.restserver.image.tag | default .Chart.AppVersion -}}
{{- $image := printf "%s:%s" .Values.restserver.image.repository $tag -}}
{{- with .Values.global.imageRegistry | default .Values.restserver.image.registry -}}
{{- printf "%s/%s" . $image -}}
{{- else -}}
{{- print $image -}}
{{- end -}}
{{- end -}}

View File

@ -0,0 +1,27 @@
{{- /* ConfigMap with the non-secret environment of the restserver container
(consumed via envFrom by the restserver Deployment).
Fix: DOCSPELL_SERVER_MAX__NOTE_LENGTH -> DOCSPELL_SERVER_MAX__NOTE__LENGTH.
Docspell maps `__` to `-` and `_` to `.` (see MAX__ITEM__PAGE__SIZE above),
so the old name addressed `max-note.length` instead of the real
`max-note-length` setting and the value was silently ignored. */ -}}
kind: ConfigMap
apiVersion: v1
metadata:
  name: {{ include "docspell.fullname" . }}-restserver
  labels:
    {{- include "restserver.labels" . | nindent 4 }}
data:
    DOCSPELL_SERVER_APP__NAME: {{ .Values.docspell.server.appName }}
    DOCSPELL_SERVER_APP__ID: {{ include "docspell.server.config.appId" . }}
    DOCSPELL_SERVER_INTERNAL__URL: http://{{ include "docspell.fullname" . }}-restserver:{{ .Values.restserver.service.port }}
    {{- include "docspell.server.config.logging" . | nindent 4 }}
    {{- include "docspell.server.config.bind" . | nindent 4 }}
    DOCSPELL_SERVER_MAX__ITEM__PAGE__SIZE: {{ .Values.docspell.server.maxItemPageSize | quote }}
    DOCSPELL_SERVER_MAX__NOTE__LENGTH: {{ .Values.docspell.server.maxNoteLength | quote }}
    DOCSPELL_SERVER_SHOW__CLASSIFICATION__SETTINGS: {{ .Values.docspell.server.showClassificationSettings | quote }}
    {{- include "docspell.server.config.auth" . | nindent 4 }}
    {{- include "docspell.server.config.donwload" . | nindent 4 }}
    {{- include "docspell.server.config.openid" . | nindent 4 }}
    {{- if .Values.docspell.server.integrationEndpoint.enabled }}
    {{- include "docspell.server.config.integrationEndpoint" . | nindent 4 }}
    {{- end }}
    {{- if .Values.docspell.fullTextSearch.enabled }}
    {{- include "docspell.config.fullTextSearch" (dict "context" . "type" "server") | nindent 4 }}
    {{- end }}
    DOCSPELL_SERVER_BACKEND_MAIL__DEBUG: {{ .Values.docspell.server.backend.mailDebug | quote }}
    {{- include "docspell.server.config.databaseSchema" . | nindent 4 }}
    {{- include "docspell.server.config.signup" . | nindent 4 }}

View File

@ -0,0 +1,59 @@
{{- /* Deployment running the docspell restserver. Configuration and secrets are injected via envFrom from the sibling ConfigMap and Secret; existing external Secrets are wired into `env:` individually. */ -}}
apiVersion: apps/v1
kind: Deployment
metadata:
name: {{ include "docspell.fullname" . }}-restserver
annotations:
{{- if .Values.restserver.deployment.annotations }}
{{- toYaml .Values.restserver.deployment.annotations | nindent 4 }}
{{- end }}
labels:
{{- include "restserver.labels" . | nindent 4 }}
spec:
replicas: {{ .Values.restserver.replicaCount }}
strategy:
type: {{ .Values.restserver.strategy.type }}
{{- if eq .Values.restserver.strategy.type "RollingUpdate" }}
rollingUpdate:
maxUnavailable: {{ .Values.restserver.strategy.rollingUpdate.maxUnavailable }}
maxSurge: {{ .Values.restserver.strategy.rollingUpdate.maxSurge }}
{{- end }}
selector:
matchLabels:
{{- include "restserver.selectorLabels" . | nindent 6 }}
{{- if .Values.restserver.deployment.labels }}
{{- toYaml .Values.restserver.deployment.labels | nindent 6 }}
{{- end }}
template:
metadata:
annotations:
{{- /* Checksums of the rendered config/secret force a rolling restart of the pods whenever either changes. */}}
checksum/config: {{ include (print $.Template.BasePath "/restserver/config.yaml") . | sha256sum }}
checksum/secret: {{ include (print $.Template.BasePath "/restserver/secret.yaml") . | sha256sum }}
{{- with .Values.restserver.podAnnotations }}
{{- toYaml . | nindent 8 }}
{{- end }}
labels:
{{- include "restserver.labels" . | nindent 8 }}
{{- if .Values.restserver.deployment.labels }}
{{- toYaml .Values.restserver.deployment.labels | nindent 8 }}
{{- end }}
spec:
{{- if (or .Values.serviceAccount.create .Values.serviceAccount.name) }}
serviceAccountName: {{ include "docspell.serviceAccountName" . }}
{{- end }}
terminationGracePeriodSeconds: {{ .Values.restserver.deployment.terminationGracePeriodSeconds }}
containers:
- name: restserver
image: "{{ include "restserver.image" . }}"
imagePullPolicy: {{ .Values.restserver.image.pullPolicy }}
ports:
- containerPort: {{ .Values.restserver.service.port }}
name: http
env:
{{- /* Individual env entries referencing pre-existing Secrets; bulk config/secrets come from envFrom below. */}}
{{- include "docspell.server.secrets.existingSecrets" . | nindent 10 }}
envFrom:
- configMapRef:
name: {{ include "docspell.fullname" . }}-restserver
- secretRef:
name: {{ include "docspell.fullname" . }}-restserver-secret
resources:
{{- toYaml .Values.restserver.resources | nindent 12 }}

View File

@ -0,0 +1,34 @@
{{- /* Secret with the restserver's sensitive env values (consumed via envFrom).
The leading block only runs `fail` validations of the configuration.
Fixes: corrected the admin-endpoint failure message ("has the supplied");
the replicas>1 check had an empty `else if` branch that silently accepted a
serverSecret with neither `value` nor `existingSecret` — it now fails with a
clear message. */ -}}
apiVersion: v1
kind: Secret
metadata:
  name: {{ include "docspell.fullname" . }}-restserver-secret
  labels:
    {{- include "restserver.labels" . | nindent 4 }}
type: Opaque
stringData:
  {{- /* NOTE(review): `assertions:` renders as a key with an empty value inside stringData — Secret stringData values must be strings; confirm this key is intentional. */}}
  assertions:
  {{- if gt .Values.restserver.replicaCount 1.0 }}
  {{- if not .Values.docspell.server.auth.serverSecret -}}
  {{- fail "If multiple replicas are running of the rest server, the server secret has to be fixed." -}}
  {{- else if not (or .Values.docspell.server.auth.serverSecret.existingSecret .Values.docspell.server.auth.serverSecret.value) -}}
  {{- fail "If multiple replicas are running of the rest server, the server secret needs either a fixed value or an existing secret." -}}
  {{- end -}}
  {{- if and .Values.docspell.server.adminEndpoint.enabled (and (not .Values.docspell.server.adminEndpoint.existingSecret) (not .Values.docspell.server.adminEndpoint.secret)) -}}
  {{- fail "When enabling the administration endpoint, a value for authentication has to be supplied." -}}
  {{- end -}}
  {{- end -}}
  {{- range $entry := .Values.docspell.server.openid -}}
  {{- if and (not $entry.provider.userUrl) (not $entry.provider.signKey) -}}
  {{- fail (printf "Failure for %s, if no userUrl is set a signKey has to be specified." $entry.provider.providerId) -}}
  {{- end -}}
  {{- end -}}
  {{- if eq .Values.docspell.server.backend.signup.mode "invite" -}}
  {{- if not .Values.docspell.server.backend.signup.newInvitePassword -}}
  {{- fail "Invite password has to be set, when using signup mode 'invite'." -}}
  {{- end -}}
  {{- end -}}
  {{- include "docspell.server.secrets.auth" . | nindent 4 }}
  {{- include "docspell.server.secrets.openid" . | nindent 4 }}
  {{- include "docspell.server.secrets.integrationEndpoint" . | nindent 4 }}
  {{- include "docspell.server.secrets.adminEndpoint" . | nindent 4 }}
  {{- include "docspell.secrets.JDBC" (dict "context" . "type" "server") | nindent 4 -}}
  {{- include "docspell.server.secrets.signup" . | nindent 4 -}}

View File

@ -0,0 +1,20 @@
{{- /* Service exposing the restserver HTTP port.
Fix: `.Values.restserver.serivce.labels` was a typo — the guard checked
`.Values.restserver.service.labels` but rendered the misspelled path, so any
user-supplied service labels were silently dropped. */ -}}
apiVersion: v1
kind: Service
metadata:
  name: {{ include "docspell.fullname" . }}-restserver
  labels:
    {{- include "restserver.labels" . | nindent 4 }}
    {{- if .Values.restserver.service.labels }}
    {{- toYaml .Values.restserver.service.labels | nindent 4 }}
    {{- end }}
  annotations:
    {{- toYaml .Values.restserver.service.annotations | nindent 4 }}
spec:
  type: {{ .Values.restserver.service.type }}
  ports:
    - port: {{ .Values.restserver.service.port }}
      targetPort: {{ .Values.restserver.service.targetPort | default .Values.restserver.service.port }}
      protocol: TCP
      name: http
  selector:
    {{- include "restserver.selectorLabels" . | nindent 6 -}}

View File

@ -0,0 +1,20 @@
{{- /* ServiceAccount shared by the docspell components; only created when serviceAccount.create is true (an externally managed account can be referenced via serviceAccount.name instead). */ -}}
{{- if .Values.serviceAccount.create }}
apiVersion: v1
kind: ServiceAccount
metadata:
name: {{ include "docspell.serviceAccountName" . }}
namespace: {{ .Release.Namespace | quote }}
labels:
{{- include "docspell.labels" . | nindent 4 }}
{{- with .Values.serviceAccount.labels }}
{{- . | toYaml | nindent 4 }}
{{- end }}
{{- with .Values.serviceAccount.annotations }}
{{- . | toYaml | nindent 4 }}
{{- end }}
automountServiceAccountToken: {{ .Values.serviceAccount.automountServiceAccountToken }}
{{- with .Values.serviceAccount.imagePullSecrets }}
imagePullSecrets:
{{- . | toYaml | nindent 2 }}
{{- end }}
{{- end }}

View File

@ -0,0 +1,43 @@
{{/*Common labels applied to every solr resource: chart/app/version/managed-by plus the selector labels*/}}
{{- define "solr.labels" -}}
helm.sh/chart: {{ include "docspell.chart" . }}
app: {{ include "docspell.name" . }}-solr
{{ include "solr.selectorLabels" . }}
app.kubernetes.io/version: {{ .Values.solr.image.tag | quote }}
version: {{ .Values.solr.image.tag | quote }}
app.kubernetes.io/managed-by: {{ .Release.Service }}
{{- end }}
{{/*Selector labels — the immutable identity labels used by the solr StatefulSet selector and Service*/}}
{{- define "solr.selectorLabels" -}}
app.kubernetes.io/name: {{ include "docspell.name" . }}-solr
app.kubernetes.io/instance: {{ .Release.Name }}
{{- end }}
{{/*Create solr image name and tag used by the deployment*/}}
{{- define "solr.image" -}}
{{- /* global.imageRegistry overrides the per-component registry; the tag falls back to the chart's appVersion. */ -}}
{{- $tag := .Values.solr.image.tag | default .Chart.AppVersion -}}
{{- $image := printf "%s:%s" .Values.solr.image.repository $tag -}}
{{- with .Values.global.imageRegistry | default .Values.solr.image.registry -}}
{{- printf "%s/%s" . $image -}}
{{- else -}}
{{- print $image -}}
{{- end -}}
{{- end -}}
{{/*Connection URL — in-cluster base URL of the docspell solr core, built from the solr Service name and port*/}}
{{- define "solr.url" -}}
{{- printf "http://%s-solr:%s/solr/docspell" (include "docspell.fullname" .) (.Values.solr.service.port | toString) -}}
{{- end }}
{{/*Storage Class — emits a storageClassName line for the PVC, preferring the solr-specific class over the global one; emits nothing when neither is set*/}}
{{- define "solr.persistence.storageClass" -}}
{{- with .Values.solr.persistence.storageClass | default .Values.global.storageClass -}}
storageClassName: {{ . | quote }}
{{- end -}}
{{- end -}}

View File

@ -0,0 +1,21 @@
{{- /* PersistentVolumeClaim backing the solr data directory; only rendered when
solr and its persistence are both enabled.
Fix: `annotations:` was rendered unconditionally (yielding an empty/null
annotations value when none are configured) and the untrimmed `{{ ... | indent 4 }}`
action left a stray blank line — the block is now guarded with `with` and
uses `nindent`. */ -}}
{{- if and .Values.solr.enabled .Values.solr.persistence.enabled -}}
apiVersion: v1
kind: PersistentVolumeClaim
metadata:
  name: {{ .Values.solr.persistence.claimName }}
  namespace: {{ .Release.Namespace }}
  {{- with .Values.solr.persistence.annotations }}
  annotations:
    {{- toYaml . | nindent 4 }}
  {{- end }}
spec:
  accessModes:
    {{- .Values.solr.persistence.accessModes | toYaml | nindent 4 }}
  volumeMode: Filesystem
  {{- include "solr.persistence.storageClass" . | nindent 2 }}
  {{- with .Values.solr.persistence.volumeName }}
  volumeName: {{ . }}
  {{- end }}
  resources:
    requests:
      storage: {{ .Values.solr.persistence.size }}
{{- end }}

View File

@ -0,0 +1,22 @@
{{- /* Service exposing the solr port; only rendered when the bundled solr is enabled.
Fix: `.Values.solr.serivce.labels` was a typo — the guard checked
`.Values.solr.service.labels` but rendered the misspelled path, so any
user-supplied service labels were silently dropped. */ -}}
{{- if .Values.solr.enabled -}}
apiVersion: v1
kind: Service
metadata:
  name: {{ include "docspell.fullname" . }}-solr
  labels:
    {{- include "solr.labels" . | nindent 4 }}
    {{- if .Values.solr.service.labels }}
    {{- toYaml .Values.solr.service.labels | nindent 4 }}
    {{- end }}
  annotations:
    {{- toYaml .Values.solr.service.annotations | nindent 4 }}
spec:
  type: {{ .Values.solr.service.type }}
  ports:
    - port: {{ .Values.solr.service.port }}
      targetPort: {{ .Values.solr.service.targetPort | default .Values.solr.service.port }}
      protocol: TCP
      name: solr
  selector:
    {{- include "solr.selectorLabels" . | nindent 4 }}
{{- end -}}

View File

@ -0,0 +1,98 @@
{{- /* StatefulSet running a single-node solr for docspell's fulltext search.
Fixes:
- `serviceAccontName` was a typo for `serviceAccountName` (an unknown field
  the API server rejects / ignores depending on validation).
- the `with .Values.global.hostAliases` block wrongly enclosed the
  nodeSelector and affinity `with` blocks: inside `with` the dot is rebound
  to the hostAliases list, so `.Values.solr.nodeSelector` raised a template
  error whenever hostAliases were set, and both blocks were silently skipped
  otherwise. Each block is now closed independently. */ -}}
{{- if .Values.solr.enabled -}}
apiVersion: apps/v1
kind: StatefulSet
metadata:
  name: {{ include "docspell.fullname" . }}-solr
  annotations:
    {{- if .Values.solr.statefulSet.annotations }}
    {{- toYaml .Values.solr.statefulSet.annotations | nindent 4 }}
    {{- end }}
  labels:
    {{- include "solr.labels" . | nindent 4 }}
spec:
  replicas: 1
  selector:
    matchLabels:
      {{- include "solr.selectorLabels" . | nindent 6 }}
      {{- if .Values.solr.statefulSet.labels }}
      {{- toYaml .Values.solr.statefulSet.labels | nindent 6 }}
      {{- end }}
  serviceName: {{ include "docspell.fullname" . }}-solr
  template:
    metadata:
      annotations:
        {{- with .Values.solr.podAnnotations }}
        {{- toYaml . | nindent 8 }}
        {{- end }}
      labels:
        {{- include "solr.labels" . | nindent 8 }}
        {{- if .Values.solr.statefulSet.labels }}
        {{- toYaml .Values.solr.statefulSet.labels | nindent 8 }}
        {{- end }}
        app: solr
    spec:
      {{- if (or .Values.serviceAccount.create .Values.serviceAccount.name) }}
      serviceAccountName: {{ include "docspell.serviceAccountName" . }}
      {{- end }}
      initContainers:
        {{- /* The official solr image runs as uid/gid 8983; fix ownership of the data volume before startup. */}}
        - name: solr-fix-permissions
          image: busybox
          command:
            - sh
            - -c
            - chown -R 8983:8983 /var/solr
          volumeMounts:
            - name: solr-data
              mountPath: /var/solr
          resources:
            {{- toYaml .Values.solr.initContainers.resources | nindent 12 }}
      terminationGracePeriodSeconds: {{ .Values.solr.statefulSet.terminationGracePeriodSeconds }}
      containers:
        - name: solr
          image: "{{ include "solr.image" . }}"
          imagePullPolicy: {{ .Values.solr.image.pullPolicy }}
          env:
            - name: SOLR_OPTS
              value: -Dsolr.modules=analysis-extras
          command:
            - docker-entrypoint.sh
            - solr-precreate
            - docspell
          ports:
            - containerPort: {{ .Values.solr.service.port }}
              name: solr
          {{- if .Values.solr.livenessProbe.enabled }}
          livenessProbe:
            {{- toYaml (omit .Values.solr.livenessProbe "enabled") | nindent 12 }}
          {{- end }}
          {{- if .Values.solr.readinessProbe.enabled }}
          readinessProbe:
            {{- toYaml (omit .Values.solr.readinessProbe "enabled") | nindent 12 }}
          {{- end }}
          resources:
            {{- toYaml .Values.solr.resources | nindent 12 }}
          volumeMounts:
            - name: solr-data
              mountPath: /var/solr
      {{- with .Values.global.hostAliases }}
      hostAliases:
        {{- toYaml . | nindent 8 }}
      {{- end }}
      {{- with .Values.solr.nodeSelector }}
      nodeSelector:
        {{- toYaml . | nindent 8 }}
      {{- end }}
      {{- with .Values.solr.affinity }}
      affinity:
        {{- toYaml . | nindent 8 }}
      {{- end }}
      volumes:
        {{- if .Values.solr.persistence.enabled }}
        - name: solr-data
          persistentVolumeClaim:
            claimName: {{ .Values.solr.persistence.claimName }}
        {{- else }}
        - name: solr-data
          emptyDir: {}
        {{- end }}
{{- end -}}

View File

@ -0,0 +1,608 @@
# Default values for docspell.
# This is a YAML-formatted file.
# Declare variables to be passed to your templates.
## @section Global
#
## @param global.imageRegistry global image registry override
## @param global.imagePullSecrets global image pull secrets override; can be extended by `imagePullSecrets`
## @param global.storageClass global storage class override
## @param global.hostAliases global hostAliases which will be added to the pod's hosts files
global:
imageRegistry: ""
## E.g.
## imagePullSecrets:
## - myRegistryKeySecretName
##
imagePullSecrets: []
storageClass: ""
hostAliases: []
# - ip: 192.168.137.2
# hostnames:
# - example.com
## @section Docspell
docspell:
## @param docspell.fullTextSearch.enabled The full-text search feature can be disabled and can be re-enabled at any time
## @param docspell.fullTextSearch.backend Which backend to use, either solr or postgresql
## @param docspell.fullTextSearch.solr.commitWithin Used to tell solr when to commit the data
## @param docspell.fullTextSearch.solr.logVerbose If true, logs request and response bodies
## @param docspell.fullTextSearch.solr.defType The defType parameter to lucene that defines the parses to use. (https://solr.apache.org/guide/8_4/query-syntax-and-parsing.html#query-syntax-and-parsing)
## @param docspell.fullTextSearch.solr.qOp The default combiner for tokens (AND / OR)
fullTextSearch:
enabled: true
solr:
commitWithin: 1000
logVerbose: false
defType: lucene
qOp: OR
## @param docspell.server.appName Name of the application shown in the top right corner of the web application
## @param docspell.server.appId Id of the node
## @param docspell.server.maxItemPageSize Hard limit of batch returned items for search
## @param docspell.server.maxNoteLength Number of characters to return for each item notes when searching
## @param docspell.server.showClassificationSettings Whether the classification form in the collective settings is displayed or not
server:
appName: Docspell
appId:
maxItemPageSize: 200
maxNoteLength: 180
showClassificationSettings: true
## @param docspell.server.logging.format Format of log messages. Can be Json, Logfmt, Fancy or Plain
## @param docspell.server.logging.minimumLevel Minimum level of the log. From lowest to highest: Trace, Debug, Info, Warn, Error
logging:
format: "fancy"
minimumLevel: "Warn"
## @param docspell.server.bind.address The address the server binds to. Should be set to `0.0.0.0` as otherwise it'll reject connections from the ingress
## @param docspell.server.bind.port The port the server binds to. Make sure to use the same as in the port for the service and ingress
bind:
address: 0.0.0.0
port: 7880
## @param docspell.server.auth.serverSecret.value Secret to sign the authenticator tokens. If empty, one will be generated
## @param docspell.server.auth.serverSecret.existingSecret.name The name of an existing Kubernetes secret that contains the server secret
## @param docspell.server.auth.serverSecret.existingSecret.key The key inside the existing Kubernetes secret that contains the server secret
## @param docspell.server.auth.sessionValid How long an authentication token is valid
## @param docspell.server.auth.onAccountSourceConflict Fail if a duplicate account from an external source should fail the login. Can be: fail, convert
## @param docspell.server.auth.rememberMe.enabled Enable/disable the remember me function
## @param docspell.server.auth.rememberMe.valid How long the remember me cookie/token is valid
auth:
serverSecret:
# value: asdf
# existingSecret:
# name: "my-existing-secret"
# key: "key-inside-secret"
sessionValid: "5 minutes"
onAccountSourceConflict: fail
rememberMe:
enabled: true
valid: "30 days"
## @param docspell.server.downloadAll.maxFiles How many files to allow in "download as zip"
## @param docspell.server.downloadAll.maxSize The maximum (uncompressed) size of the zip file contents.
downloadAll:
maxFiles: 500
maxSize: 1400M
## @param docspell.server.openid OpenID Connect (oidc) or OAuth2 authentication providers. Only the "Authorization Code Flow" is supported
openid:
- display: Keycloak
enabled: false
provider:
providerId: keycloak
clientId: docspell
clientSecret: example-secret-439e-bf06-911e4cdd56a6
authorizeUrl: http://localhost:8080/auth/realms/home/protocol/openid-connect/auth
tokenUrl: http://localhost:8080/auth/realms/home/protocol/openid-connect/token
scope: openid profile email
# User URL is not used when signature key is set
# userUrl: http://localhost:8080/auth/realms/home/protocol/openid-connect/userinfo
logoutUrl: http://localhost:8080/auth/realms/home/protocol/openid-connect/logout
signKey: b64:anVzdC1hLXRlc3Q=
sigAlgo: RS512
# existingSecret:
# name: "my-existing-secret"
# clientIdKey: clientId
# clientSecretKey: clientSecret
# signKeyKey: signKey
# The collective of the user is given in the access token as property `docspell_collective`
collectiveKey: "lookup:docspell_collective"
# The username to use for the docspell account
userKey: preferred_username
## @param docspell.server.oidcAutoRedirect When exactly one OIDC/OAuth provider is configured, then the webapp automatically redirects to its authentication page skipping the docspell login page
oidcAutoRedirect: true
## @param docspell.server.integrationEndpoint.enabled Enable endpoint to upload files to any collective
## @param docspell.server.integrationEndpoint.priority Priority to use when submitting files through this endpoint
## @param docspell.server.integrationEndpoint.sourceName The name used for the item "source" property when uploaded through this endpoint
## @param docspell.server.integrationEndpoint.allowedIps.enabled Enable ip-allow-access-list
## @param docspell.server.integrationEndpoint.allowedIps.ips List of ips which should be added to the access list
integrationEndpoint:
enabled: true
priority: low
sourceName: integration
allowedIps:
enabled: false
ips:
# IP addresses may be specified as simple globs: a part marked as '*' matches any octet, like in `192.168.*.*`
- 127.0.0.1
## @param docspell.server.integrationEndpoint.httpBasic.enabled Whether integration endpoint requests are expected to use http basic auth when uploading files
## @param docspell.server.integrationEndpoint.httpBasic.credentials.user The username for httpBasic authentication
## @param docspell.server.integrationEndpoint.httpBasic.credentials.password The password for the httpBasic authentication
## @param docspell.server.integrationEndpoint.httpBasic.existingSecret.name Name of an existing Kubernetes secret that contains the httpBasic credentials
## @param docspell.server.integrationEndpoint.httpBasic.existingSecret.usernameKey The key inside the existing Kubernetes secret that contains the username for httpBasic
## @param docspell.server.integrationEndpoint.httpBasic.existingSecret.passwordKey The key inside the existing Kubernetes secret that contains the password for httpBasic
httpBasic:
enabled: false
realm: "Docspell Integration"
credentials:
# username: "docspell-int"
# password: "docspell-int"
# existingSecret:
# name: "http-basic-secret-name"
# usernameKey: "username-key-inside-secret"
# passwordKey: "password-key-inside-secret"
## @param docspell.server.integrationEndpoint.httpHeader.enabled Whether integration endpoint requests are expected to supply some specific header when uploading files
## @param docspell.server.integrationEndpoint.httpHeader.headerName The name of the header that has to be included in the integration endpoint request
## @param docspell.server.integrationEndpoint.httpHeader.headerValue.value The header value that is expected to be included in the integration endpoint request
## @param docspell.server.integrationEndpoint.httpHeader.headerValue.existingSecret.name The name of an existing Kubernetes secret that contains the value expected to be included in the integration endpoint request
## @param docspell.server.integrationEndpoint.httpHeader.headerValue.existingSecret.key The key inside of an existing Kubernetes secret that contains the value expected to be included in the integration endpoint
httpHeader:
enabled: false
headerName: "Docspell-Integration"
headerValue:
# value: "SomeSecret"
# existingSecret:
# name: "my-existing-secret"
# key: "header-value-key-inside-secret"
## @param docspell.server.adminEndpoint.enabled Whether to enable the special administration endpoint. A secret value or existing secret containing the value has to be supplied when enabled
## @param docspell.server.adminEndpoint.secret.value Value for the administration endpoint
## @param docspell.server.adminEndpoint.existingSecret.name The name of an existing Kubernetes secret that contains the value for the admin endpoint
## @param docspell.server.adminEndpoint.existingSecret.key The key inside of an existing Kubernetes secret that contains the value for the admin endpoint
adminEndpoint:
enabled: false
# secret:
# value: "test"
# existingSecret:
# name: "my-existing-secret"
# key: "admin-key-inside-secret"
## @param docspell.server.backend.mailDebug Enable or disabling debugging for e-mail related functionality
backend:
mailDebug: false
## @param docspell.server.backend.databaseSchema.runMainMigrations Whether to run main database migrations
## @param docspell.server.backend.databaseSchema.runFixupMigrations Whether to run the fixup migrations
## @param docspell.server.backend.databaseSchema.repairSchema Use with care. This repairs all migrations in the database by updating their checksums and removing failed migrations
databaseSchema:
runMainMigrations: true
runFixupMigrations: true
repairSchema: false
## @param docspell.server.backend.signup.mode The mode defines if new users can signup or not (open, invite, closed)
## @param docspell.server.backend.signup.newInvitePassword.value If mode is 'invite', a password must be provided to generate invitation keys
## @param docspell.server.backend.signup.newInvitePassword.existingSecret.name The name of an existing Kubernetes secret that contains the invitation generation password
## @param docspell.server.backend.signup.newInvitePassword.existingSecret.key The key inside of an existing Kubernetes secret that contains the invitation generation password
## @param docspell.server.backend.signup.inviteTime If mode is 'invite', this is the period an invitation token is considered valid
signup:
mode: open
newInvitePassword:
# value: asdf
# existingSecret:
# name: "my-existing-secret"
# key: "invite-password-key"
inviteTime: "3 days"
## @param docspell.joex.appId Id of the node
## @param docspell.joex.mailDebug Enable or disabling debugging for e-mail related functionality
joex:
appId:
mailDebug: false
## @param docspell.joex.bind.address The address joex binds to. Should be set to `0.0.0.0`, as otherwise it'll refuse connections
## @param docspell.joex.bind.port The port joex binds to. Make sure to set the same port for the service
bind:
address: 0.0.0.0
port: 7878
## @param docspell.joex.logging.format Format of log messages. Can be Json, Logfmt, Fancy or Plain
## @param docspell.joex.logging.minimumLevel Minimum level of the log. From lowest to highest: Trace, Debug, Info, Warn, Error
logging:
format: "fancy"
minimumLevel: "Warn"
## @param docspell.joex.databaseSchema.runMainMigrations Whether to run main database migrations
## @param docspell.joex.databaseSchema.runFixupMigrations Whether to run the fixup migrations
## @param docspell.joex.databaseSchema.repairSchema Use with care. This repairs all migrations in the database by updating their checksums and removing failed migrations
databaseSchema:
runMainMigrations: true
runFixupMigrations: true
repairSchema: false
## @param docspell.joex.scheduler.name Each scheduler needs a unique name. This defaults to the node name
## @param docspell.joex.scheduler.poolSize Number of processing allowed in parallel
## @param docspell.joex.scheduler.countingScheme A counting scheme determines the ratio of how high- and low-prio jobs are run
## @param docspell.joex.scheduler.retries How often a failed job should be retried until it enters failed state
## @param docspell.joex.scheduler.retryDelay The delay until the next try is performed for a failed job
## @param docspell.joex.scheduler.logBufferSize The queue size of log statements from a job
## @param docspell.joex.scheduler.wakeupPeriod If no job is left in the queue, the scheduler will wait until a notify is requested
scheduler:
name:
poolSize: 1
countingScheme: "4,1"
retries: 2
retryDelay: "1 minute"
logBufferSize: 500
wakeupPeriod: "30 minutes"
## @param docspell.joex.periodicScheduler.name Each scheduler needs a unique name. This defaults to the node name
## @param docspell.joex.periodicScheduler.wakeupPeriod A fallback to start looking for due periodic tasks regularly
periodicScheduler:
name:
wakeupPeriod: "10 minutes"
## @param docspell.joex.userTasks.scanMailbox.maxFolders A limit of how many folders to scan through. If a user configures more than this, only upto this limit folders are scanned and a warning is logged
## @param docspell.joex.userTasks.scanMailbox.mailChunkSize How many mails (headers only) to retrieve in one chunk
## @param docspell.joex.userTasks.scanMailbox.maxMails A limit on how many mails to process in one job run. This is meant to avoid too heavy resource allocation to one user/collective
userTasks:
scanMailbox:
maxFolders: 50
mailChunkSize: 50
maxMails: 500
## @param docspell.joex.houseKeeping.schedule When the house keeping tasks execute. Default is to run every week
## @param docspell.joex.houseKeeping.cleanupInvites.enabled Whether to remove invitation keys that have been created but not used
## @param docspell.joex.houseKeeping.cleanupInvites.olderThan The minimum age of invites to be deleted
## @param docspell.joex.houseKeeping.cleanupRememberMe.enabled Whether to remove expired remember-me tokens
## @param docspell.joex.houseKeeping.cleanupRememberMe.olderThan The minimum age of tokens to be deleted
## @param docspell.joex.houseKeeping.cleanupJobs.enabled Whether to delete old job log files. Logs are already stored in the database
## @param docspell.joex.houseKeeping.cleanupJobs.olderThan The minimum age of jobs to delete
## @param docspell.joex.houseKeeping.cleanupJobs.deleteBatch how many jobs are deleted in one transaction
## @param docspell.joex.houseKeeping.cleanupDownloads.enabled Whether to delete cached zip files for past downloads
## @param docspell.joex.houseKeeping.cleanupDownloads.olderThan The minimum age of a download file to be deleted
## @param docspell.joex.houseKeeping.cleanupNodes.enabled Whether to delete node entries that are not reachable anymore
## @param docspell.joex.houseKeeping.cleanupNodes.minNotFound How often the node must be unreachable, before it's removed
## @param docspell.joex.houseKeeping.integrityCheck.enabled Whether to check all files against their checksum
houseKeeping:
schedule: "Sun *-*-* 00:00:00 UTC"
cleanupInvites:
enabled: true
olderThan: "30 days"
cleanupRememberMe:
enabled: true
olderThan: "30 days"
cleanupJobs:
enabled: true
olderThan: "30 days"
deleteBatch: 100
cleanupDownloads:
enabled: true
olderThan: "14 days"
cleanupNodes:
enabled: true
minNotFound: 2
integrityCheck:
enabled: true
## @param docspell.joex.updateCheck.enabled Whether to periodically check for new releases of docspell
## @param docspell.joex.updateCheck.testRun Sends the mail without checking the latest release
## @param docspell.joex.updateCheck.schedule When the update check should execute. Default is to run every week
## @param docspell.joex.updateCheck.senderAccount An account id in the form of `collective/user` This user account must have at least one valid SMTP settings which are used to send the mail
## @param docspell.joex.updateCheck.smtpId The SMTP connection id that should be used for sending the mail
## @param docspell.joex.updateCheck.recipients A list of recipient e-mail addresses
## @param docspell.joex.updateCheck.subject The subject of the mail. It supports the same variables as the body
## @param docspell.joex.updateCheck.body The body of the mail. Subject and body can contain these variables which are replaced: latestVersion, currentVersion, releasedAt. The body is processed as markdown after the variables have been replaced
updateCheck:
enabled: false
testRun: false
schedule: "Sun *-*-* 00:00:00 UTC"
senderAccount:
smtpId:
recipients: []
# - john.doe@gmail.com
subject: "Docspell {{ latestVersion }} is available"
body: |-
Hello,
You are currently running Docspell {{ currentVersion }}. Version *{{ latestVersion }}*
is now available, which was released on {{ releasedAt }}. Check the release page at:
<https://github.com/eikek/docspell/releases/latest>
Have a nice day!
Docspell Update Check
## @param docspell.joex.convert.htmlConverter Which HTML->PDF converter command to use. (wkhtmlpdf, weasyprint)
convert:
htmlConverter: wkhtmlpdf
## @param docspell.joex.fullTextSearch.migration.indexAllChunk Chunk size to use when indexing data from the database. This many attachments are loaded into memory and pushed to the full-text index
fullTextSearch:
migration:
indexAllChunk: 10
## @section Ingress
#
## @param ingress.enabled Enable ingress
## @param ingress.className Ingress class name
## @param ingress.annotations Ingress annotations
## @param ingress.hosts[0].host Default Ingress host
## @param ingress.hosts[0].paths[0].path Default Ingress path
## @param ingress.hosts[0].paths[0].pathType Ingress path type
## @param ingress.tls Ingress tls settings
## @extra ingress.apiVersion specify APIVersion of ingress object. Mostly would only be used for argocd
ingress:
enabled: true
className:
annotations:
# Set a proper upload size, so that large documents can be uploaded as well.
nginx.ingress.kubernetes.io/proxy-body-size: 10G
# This is needed for the websocket connections to work.
nginx.ingress.kubernetes.io/configuration-snippet: |
proxy_set_header Upgrade "websocket";
proxy_set_header Connection "Upgrade";
# kubernetes.io/ingress.class: nginx
# kubernetes.io/tls-acme: "true"
hosts:
- host: docspell.example.com
paths:
- path: /
pathType: Prefix
tls: []
# - secretName: chart-exmaple-tls
# hosts:
# - docspell.example.com
## @section ServiceAccount
#
## @param serviceAccount.create Enable the creation of a ServiceAccount for docspell
## @param serviceAccount.name Name of the created ServiceAccount, defaults to release name.
## @param serviceAccount.automountServiceAccountToken Enable/disable auto mounting of the service account token
## @param serviceAccount.imagePullSecrets Image pull secrets, available to the ServiceAccount
## @param serviceAccount.annotations Custom annotations for the ServiceAccount
## @param serviceAccount.labels Custom labels for the ServiceAccount
serviceAccount:
create: false
name: ""
automountServiceAccountToken: false
imagePullSecrets: []
# - name: private-registry-access
annotations: {}
labels: {}
## @section Restserver
#
## @param restserver.replicaCount Number of replicas for the restserver deployment
## @param restserver.podAnnotations Annotations for the restserver pod
restserver:
replicaCount: 1
podAnnotations: {}
## @param restserver.image.registry Image registry, e.g. gcr.io,docker.io
## @param restserver.image.repository Image to start for this pod
## @param restserver.image.tag Visit [Image tag](https://hub.docker.com/r/docspell/restserver/tags?page=1&ordering=last_updated). Defaults to `appVersion` within Chart.yaml.
## @param restserver.image.pullPolicy Image pull policy
image:
registry: ""
repository: docspell/restserver
tag: ""
pullPolicy: IfNotPresent
## @param restserver.service.type Kubernetes service type for restserver traffic
## @param restserver.service.port Port number for restserver traffic
## @param restserver.service.annotations Restserver service annotations
## @param restserver.service.labels Restserver service additional labels
service:
type: ClusterIP
port: 7880
annotations: {}
labels: {}
## @param restserver.deployment.labels Labels for the restserver deployment
## @param restserver.deployment.annotations Annotations for the restserver deployment to be created
## @param restserver.deployment.terminationGracePeriodSeconds How long to wait until forcefully kill the restserver pod
## @param restserver.deployment.env Additional environment variables to pass to the restserver container
deployment:
labels: {}
annotations: {}
terminationGracePeriodSeconds: 60
env: []
## @param restserver.strategy.type Strategy type
## @param restserver.strategy.rollingUpdate.maxSurge maxSurge
## @param restserver.strategy.rollingUpdate.maxUnavailable maxUnavailable
strategy:
type: "RollingUpdate"
rollingUpdate:
maxSurge: "100%"
maxUnavailable: 0
## @param restserver.resources.limits.cpu CPU limit for the restserver pod
## @param restserver.resources.limits.memory Memory limit for the restserver pod
## @param restserver.resources.requests.cpu Requested cpu for the restserver pod
## @param restserver.resources.requests.memory Requested memory for the restserver pod
resources:
limits:
cpu: 1
memory: 1Gi
requests:
cpu: 0.5
memory: 512Mi
## @section Joex
#
## @param joex.replicaCount Number of replicas for the joex deployment
## @param joex.podAnnotations Annotations for the joex pod
## @param joex.additionalArgs Additional arguments that should be passed to the joex pod
joex:
replicaCount: 1
podAnnotations: {}
additionalArgs:
- -J-Xmx3G
## @param joex.image.registry Image registry, e.g. gcr.io,docker.io
## @param joex.image.repository Image to start for this pod
## @param joex.image.tag Visit [Image tag](https://hub.docker.com/r/docspell/joex/tags?page=1&ordering=last_updated). Defaults to `appVersion` within Chart.yaml.
## @param joex.image.pullPolicy Image pull policy
image:
registry: ""
repository: docspell/joex
tag: ""
pullPolicy: IfNotPresent
## @param joex.service.type Kubernetes service type for joex traffic
## @param joex.service.port Port number for joex traffic
## @param joex.service.annotations Joex service annotations
## @param joex.service.labels Joex service additional labels
service:
type: ClusterIP
port: 7878
annotations: {}
labels: {}
## @param joex.deployment.labels Labels for the joex deployment
## @param joex.deployment.annotations Annotations for the joex deployment to be created
## @param joex.deployment.terminationGracePeriodSeconds How long to wait until forcefully kill the joex pod
## @param joex.deployment.env Additional environment variables to pass to the joex container
deployment:
labels: {}
annotations: {}
terminationGracePeriodSeconds: 60
env: []
## @param joex.strategy.type Strategy type
## @param joex.strategy.rollingUpdate.maxSurge maxSurge
## @param joex.strategy.rollingUpdate.maxUnavailable maxUnavailable
strategy:
type: "RollingUpdate"
rollingUpdate:
maxSurge: "100%"
maxUnavailable: 0
## @param joex.resources.limits.cpu CPU limit for the joex pod
## @param joex.resources.limits.memory Memory limit for the joex pod. Make sure to change the `-J-Xmx` argument to reflect the max-memory setting
## @param joex.resources.requests.cpu Requested cpu for the joex pod
## @param joex.resources.requests.memory Requested memory for the joex pod
resources:
limits:
cpu: 1
memory: 3Gi
requests:
cpu: 0.5
memory: 1.5Gi
## @section solr
#
## @param solr.enabled Enable Apache Solr for full-text-search
## @param solr.podAnnotations Annotations for the solr pod
## @param solr.nodeSelector NodeSelector for the solr statefulset
## @param solr.affinity Affinity for the solr statefulset
## @param solr.livenessProbe
solr:
enabled: true
podAnnotations: {}
nodeSelector: {}
affinity: {}
## @param solr.image.registry Image registry, e.g. gcr.io,docker.io
## @param solr.image.repository Image to start for this pod
## @param solr.image.tag Visit [Image tag](https://hub.docker.com/_/solr/tags?page=1&ordering=last_updated). Default is `9`.
## @param solr.image.pullPolicy Image pull policy
image:
registry: ""
repository: solr
tag: "9"
pullPolicy: IfNotPresent
## @param solr.service.type Kubernetes service type for solr traffic
## @param solr.service.port Port number for solr traffic
## @param solr.service.annotations Solr service annotations
## @param solr.service.labels Solr service additional labels
service:
type: ClusterIP
port: 8983
annotations: {}
labels: {}
## @param solr.livenessProbe.enabled Enable liveness probe
## @param solr.livenessProbe.httpGet.port Port for the http get request
## @param solr.livenessProbe.httpGet.path URL path for the http get request
## @param solr.livenessProbe.initialDelaySeconds Initial delay before liveness probe is initiated
## @param solr.livenessProbe.periodSeconds Period for liveness probe
## @param solr.livenessProbe.timeoutSeconds Timeout for liveness probe
livenessProbe:
enabled: true
httpGet:
port: 8983
path: /solr/admin/info/system
initialDelaySeconds: 60
periodSeconds: 10
timeoutSeconds: 5
## @param solr.readinessProbe.enabled Enable readiness probe
## @param solr.readinessProbe.httpGet.port Port for the http get request
## @param solr.readinessProbe.httpGet.path URL path for the http get request
## @param solr.readinessProbe.initialDelaySeconds Initial delay before readiness probe is initiated
## @param solr.readinessProbe.periodSeconds Period for readiness probe
## @param solr.readinessProbe.timeoutSeconds Timeout for readiness probe
readinessProbe:
enabled: true
httpGet:
path: /solr/admin/info/system
port: 8983
initialDelaySeconds: 10
periodSeconds: 5
timeoutSeconds: 1
## @param solr.resources Kubernetes resources for solr
resources:
{}
## @param solr.initContainers.resources.limits Kubernetes limits for solr init containers
## @param solr.initContainers.resources.requests.cpu cpu resource limits for solr init containers
## @param solr.initContainers.resources.requests.memory memory limits for solr init containers
initContainers:
resources:
limits: {}
requests:
cpu: 100m
memory: 128Mi
## @param solr.statefulSet.labels Labels for the solr statefulset
## @param solr.statefulSet.annotations Annotations for the solr statefulset to be created
## @param solr.statefulSet.terminationGracePeriodSeconds How long to wait until forcefully kill the solr pod
## @param solr.statefulSet.env Additional environment variables to pass to the solr container
statefulSet:
labels: {}
annotations: {}
terminationGracePeriodSeconds: 60
env: []
# - name: VARIABLE
# value: my-value
## @param solr.persistence.enabled Enable persistence storage for solr
## @param solr.persistence.claimName Use an existing claim to store solr index
## @param solr.persistence.size Size for persistence to store solr index
## @param solr.persistence.accessModes AccessMode for persistence
## @param solr.persistence.storageClass Name of the storage class to use
## @param solr.persistence.volumeName Name of persistent volume in PVC
## @param solr.persistence.annotations.helm/sh/resource-policy Resource policy for the persistence volume claim
persistence:
enabled: true
claimName: solr-data
size: 5Gi
accessModes: ["ReadWriteOnce"]
storageClass:
volumeName: ""
annotations:
helm.sh/resource-policy: keep
## @section PostgreSQL
#
## @param postgresql.enabled Enable PostgreSQL
## @param postgresql.global.postgresql.auth.password Password for the `dbname` user (overrides `auth.password`)
## @param postgresql.global.postgresql.auth.database Name for a custom database to create (overrides `auth.database`)
## @param postgresql.global.postgresql.auth.username Name for a custom user to create (overrides `auth.username`)
## @param postgresql.global.postgresql.auth.existingSecret Name of an existing Kubernetes secret that contains the postgresql credentials. `auth.password` will be ignored and picked up from this secret
## @param postgresql.global.postgresql.auth.secretKeys.adminPasswordKey Name of key in existing secret to use for PostgreSQL credentials.
## @param postgresql.global.postgresql.auth.secretKeys.userPasswordKey Name of key in existing secret to use for PostgreSQL credentials.
## @param postgresql.global.postgresql.service.ports.postgresql PostgreSQL service port (overrides `service.ports.postgresql`)
## @param postgresql.primary.persistence.size PVC Storage Request for PostgreSQL volume
postgresql:
enabled: true
global:
postgresql:
auth:
database: dbname
username: dbuser
password: dbpass
# existingSecret: postgres-secret
# secretKeys:
# adminPasswordKey: postgres-password
# userPasswordKey: password
service:
postgresql: 5432
primary:
persistence:
size: 10Gi
annotations:
helm.sh/resource-policy: keep

View File

@ -38,9 +38,9 @@ final case class AddonArchive(url: LenientUri, name: String, version: String) {
Files[F].createDirectories(target) *> Files[F].createDirectories(target) *>
reader(url) reader(url)
.through(Zip[F](logger.some).unzip(glob = glob, targetDir = target.some)) .through(Zip[F](logger.some).unzip(glob = glob, targetDir = target.some))
.evalTap(_ => Directory.unwrapSingle[F](logger, target))
.compile .compile
.drain .drain
.flatTap(_ => Directory.unwrapSingle[F](logger, target))
.as(target) .as(target)
} }
} }

View File

@ -110,7 +110,7 @@ private[addons] object RunnerUtil {
): F[AddonResult] = ): F[AddonResult] =
for { for {
stdout <- stdout <-
if (ctx.meta.options.exists(_.collectOutput)) CollectOut.buffer[F] if (ctx.meta.parseResult) CollectOut.buffer[F]
else CollectOut.none[F].pure[F] else CollectOut.none[F].pure[F]
cmdResult <- SysExec(cmd, logger, ctx.baseDir.some) cmdResult <- SysExec(cmd, logger, ctx.baseDir.some)
.flatMap( .flatMap(
@ -135,7 +135,7 @@ private[addons] object RunnerUtil {
out <- stdout.get out <- stdout.get
_ <- logger.debug(s"Addon stdout: $out") _ <- logger.debug(s"Addon stdout: $out")
result = Option result = Option
.when(ctx.meta.options.exists(_.collectOutput) && out.nonEmpty)( .when(ctx.meta.parseResult && out.nonEmpty)(
JsonParser JsonParser
.decode[AddonOutput](out) .decode[AddonOutput](out)
.fold(AddonResult.decodingError, AddonResult.success) .fold(AddonResult.decodingError, AddonResult.success)

View File

@ -9,7 +9,7 @@ package docspell.addons
import cats.effect._ import cats.effect._
import cats.syntax.option._ import cats.syntax.option._
import docspell.common.UrlReader import docspell.common._
import docspell.logging.TestLoggingConfig import docspell.logging.TestLoggingConfig
import munit._ import munit._
@ -42,10 +42,20 @@ class AddonArchiveTest extends CatsEffectSuite with TestLoggingConfig with Fixtu
} yield () } yield ()
} }
tempDir.test("read archive from zip with yaml only") { dir =>
for {
aa <- AddonArchive.read[IO](singleFileAddonUrl, UrlReader.defaultReader[IO], None)
_ = assertEquals(aa.version, "0.7.0")
path <- aa.extractTo(UrlReader.defaultReader[IO], dir)
read <- AddonArchive.read[IO](aa.url, UrlReader.defaultReader[IO], path.some)
_ = assertEquals(aa, read)
} yield ()
}
tempDir.test("Read generated addon from path") { dir => tempDir.test("Read generated addon from path") { dir =>
AddonGenerator.successAddon("mini-addon").use { addon => AddonGenerator.successAddon("mini-addon").use { addon =>
for { for {
archive <- IO(AddonArchive(addon.url, "", "")) archive <- IO(AddonArchive(addon.url, "test-addon", "0.1.0"))
path <- archive.extractTo[IO](UrlReader.defaultReader[IO], dir) path <- archive.extractTo[IO](UrlReader.defaultReader[IO], dir)
read <- AddonArchive.read[IO](addon.url, UrlReader.defaultReader[IO], path.some) read <- AddonArchive.read[IO](addon.url, UrlReader.defaultReader[IO], path.some)

View File

@ -142,7 +142,7 @@ class AddonExecutorTest extends CatsEffectSuite with Fixtures with TestLoggingCo
AddonExecutionResult.executionResultMonoid AddonExecutionResult.executionResultMonoid
.combine( .combine(
AddonExecutionResult.empty, AddonExecutionResult.empty,
AddonExecutionResult(Nil, true) AddonExecutionResult(Nil, pure = true)
) )
.pure .pure
) )

View File

@ -27,9 +27,9 @@ object AddonGenerator {
): Resource[IO, AddonArchive] = ): Resource[IO, AddonArchive] =
output match { output match {
case None => case None =>
generate(name, version, false)("exit 0") generate(name, version, collectOutput = false)("exit 0")
case Some(out) => case Some(out) =>
generate(name, version, true)( generate(name, version, collectOutput = true)(
s""" s"""
|cat <<-EOF |cat <<-EOF
|${out.asJson.noSpaces} |${out.asJson.noSpaces}
@ -77,8 +77,9 @@ object AddonGenerator {
meta = AddonMeta.Meta(name, version, None), meta = AddonMeta.Meta(name, version, None),
triggers = Set(AddonTriggerType.ExistingItem: AddonTriggerType).some, triggers = Set(AddonTriggerType.ExistingItem: AddonTriggerType).some,
args = None, args = None,
runner = runner = AddonMeta
AddonMeta.Runner(None, None, AddonMeta.TrivialRunner(true, "addon.sh").some).some, .Runner(None, None, AddonMeta.TrivialRunner(enable = true, "addon.sh").some)
.some,
options = options =
AddonMeta.Options(networking = !collectOutput, collectOutput = collectOutput).some AddonMeta.Options(networking = !collectOutput, collectOutput = collectOutput).some
) )

View File

@ -35,4 +35,13 @@ class AddonMetaTest extends CatsEffectSuite with TestLoggingConfig with Fixtures
_ = assertEquals(meta, dummyAddonMeta) _ = assertEquals(meta, dummyAddonMeta)
} yield () } yield ()
} }
test("parse yaml with defaults") {
val yamlStr = """meta:
| name: "test"
| version: "0.1.0"
|""".stripMargin
val meta = AddonMeta.fromYamlString(yamlStr).fold(throw _, identity)
assert(meta.parseResult)
}
} }

View File

@ -31,6 +31,9 @@ trait Fixtures extends TestLoggingConfig { self: CatsEffectSuite =>
val miniAddonUrl = val miniAddonUrl =
LenientUri.fromJava(getClass.getResource("/minimal-addon.zip")) LenientUri.fromJava(getClass.getResource("/minimal-addon.zip"))
val singleFileAddonUrl =
LenientUri.fromJava(getClass.getResource("/docspell-addon-single-file.zip"))
val dummyAddonMeta = val dummyAddonMeta =
AddonMeta( AddonMeta(
meta = meta =
@ -40,13 +43,13 @@ trait Fixtures extends TestLoggingConfig { self: CatsEffectSuite =>
), ),
None, None,
runner = Runner( runner = Runner(
nix = NixRunner(true).some, nix = NixRunner(enable = true).some,
docker = DockerRunner( docker = DockerRunner(
enable = true, enable = true,
image = None, image = None,
build = "Dockerfile".some build = "Dockerfile".some
).some, ).some,
trivial = TrivialRunner(true, "src/addon.sh").some trivial = TrivialRunner(enable = true, "src/addon.sh").some
).some, ).some,
options = Options(networking = true, collectOutput = true).some options = Options(networking = true, collectOutput = true).some
) )
@ -55,7 +58,7 @@ trait Fixtures extends TestLoggingConfig { self: CatsEffectSuite =>
Path(s"/tmp/target/test-temp") Path(s"/tmp/target/test-temp")
val tempDir = val tempDir =
ResourceFixture[Path]( ResourceFunFixture[Path](
Resource.eval(Files[IO].createDirectories(baseTempDir)) *> Resource.eval(Files[IO].createDirectories(baseTempDir)) *>
Files[IO] Files[IO]
.tempDirectory(baseTempDir.some, "run-", PosixPermissions.fromOctal("777")) .tempDirectory(baseTempDir.some, "run-", PosixPermissions.fromOctal("777"))
@ -65,7 +68,7 @@ trait Fixtures extends TestLoggingConfig { self: CatsEffectSuite =>
runner: RunnerType, runner: RunnerType,
runners: RunnerType* runners: RunnerType*
): AddonExecutorConfig = { ): AddonExecutorConfig = {
val nspawn = NSpawn(false, "sudo", "systemd-nspawn", Duration.millis(100)) val nspawn = NSpawn(enabled = false, "sudo", "systemd-nspawn", Duration.millis(100))
AddonExecutorConfig( AddonExecutorConfig(
runner = runner :: runners.toList, runner = runner :: runners.toList,
runTimeout = Duration.minutes(2), runTimeout = Duration.minutes(2),

View File

@ -125,6 +125,7 @@ object DateFind {
case Language.Dutch => dmy.or(ymd).or(mdy) case Language.Dutch => dmy.or(ymd).or(mdy)
case Language.Latvian => dmy.or(lavLong).or(ymd) case Language.Latvian => dmy.or(lavLong).or(ymd)
case Language.Japanese => ymd case Language.Japanese => ymd
case Language.JpnVert => ymd
case Language.Hebrew => dmy case Language.Hebrew => dmy
case Language.Lithuanian => ymd case Language.Lithuanian => ymd
case Language.Polish => dmy case Language.Polish => dmy

View File

@ -54,6 +54,8 @@ object MonthName {
latvian latvian
case Language.Japanese => case Language.Japanese =>
japanese japanese
case Language.JpnVert =>
japanese
case Language.Hebrew => case Language.Hebrew =>
hebrew hebrew
case Language.Lithuanian => case Language.Lithuanian =>

View File

@ -22,7 +22,7 @@ import munit._
class StanfordNerAnnotatorSuite extends FunSuite with TestLoggingConfig { class StanfordNerAnnotatorSuite extends FunSuite with TestLoggingConfig {
lazy val germanClassifier = lazy val germanClassifier =
new StanfordCoreNLP(Properties.nerGerman(None, false)) new StanfordCoreNLP(Properties.nerGerman(None, highRecall = false))
lazy val englishClassifier = lazy val englishClassifier =
new StanfordCoreNLP(Properties.nerEnglish(None)) new StanfordCoreNLP(Properties.nerEnglish(None))

View File

@ -90,6 +90,6 @@ object Config {
} }
object Addons { object Addons {
val disabled: Addons = val disabled: Addons =
Addons(false, false, UrlMatcher.False, UrlMatcher.True) Addons(enabled = false, allowImpure = false, UrlMatcher.False, UrlMatcher.True)
} }
} }

View File

@ -127,7 +127,7 @@ object Login {
_ <- logF.trace(s"Account lookup: $data") _ <- logF.trace(s"Account lookup: $data")
res <- data match { res <- data match {
case Some(d) if checkNoPassword(d, Set(AccountSource.OpenId)) => case Some(d) if checkNoPassword(d, Set(AccountSource.OpenId)) =>
doLogin(config, d.account, false) doLogin(config, d.account, rememberMe = false)
case Some(d) if checkNoPassword(d, Set(AccountSource.Local)) => case Some(d) if checkNoPassword(d, Set(AccountSource.Local)) =>
config.onAccountSourceConflict match { config.onAccountSourceConflict match {
case OnAccountSourceConflict.Fail => case OnAccountSourceConflict.Fail =>
@ -145,7 +145,7 @@ object Login {
AccountSource.OpenId AccountSource.OpenId
) )
) )
res <- doLogin(config, d.account, false) res <- doLogin(config, d.account, rememberMe = false)
} yield res } yield res
} }
case _ => case _ =>
@ -212,7 +212,12 @@ object Login {
val okResult: F[Result] = val okResult: F[Result] =
for { for {
_ <- store.transact(RUser.updateLogin(sf.token.account)) _ <- store.transact(RUser.updateLogin(sf.token.account))
newToken <- AuthToken.user(sf.token.account, false, config.serverSecret, None) newToken <- AuthToken.user(
sf.token.account,
requireSecondFactor = false,
config.serverSecret,
None
)
rem <- OptionT rem <- OptionT
.whenF(sf.rememberMe && config.rememberMe.enabled)( .whenF(sf.rememberMe && config.rememberMe.enabled)(
insertRememberToken(store, sf.token.account, config) insertRememberToken(store, sf.token.account, config)
@ -239,7 +244,9 @@ object Login {
(for { (for {
_ <- validateToken _ <- validateToken
key <- EitherT.fromOptionF( key <- EitherT.fromOptionF(
store.transact(RTotp.findEnabledByUserId(sf.token.account.userId, true)), store.transact(
RTotp.findEnabledByUserId(sf.token.account.userId, enabled = true)
),
Result.invalidAuth Result.invalidAuth
) )
now <- EitherT.right[Result](Timestamp.current[F]) now <- EitherT.right[Result](Timestamp.current[F])
@ -255,7 +262,12 @@ object Login {
def okResult(acc: AccountInfo) = def okResult(acc: AccountInfo) =
for { for {
_ <- store.transact(RUser.updateLogin(acc)) _ <- store.transact(RUser.updateLogin(acc))
token <- AuthToken.user(acc, false, config.serverSecret, None) token <- AuthToken.user(
acc,
requireSecondFactor = false,
config.serverSecret,
None
)
} yield Result.ok(token, None) } yield Result.ok(token, None)
def rememberedLogin(rid: Ident) = def rememberedLogin(rid: Ident) =

View File

@ -93,7 +93,7 @@ object AddonOps {
AddonResult.executionFailed( AddonResult.executionFailed(
new Exception(s"Addon run config ${id.id} not found.") new Exception(s"Addon run config ${id.id} not found.")
) :: Nil, ) :: Nil,
false pure = false
) :: Nil, ) :: Nil,
Nil Nil
) )

View File

@ -72,7 +72,7 @@ private[joex] class AddonPrepare[F[_]: Sync](store: Store[F]) extends LoggerExte
token <- AuthToken.user( token <- AuthToken.user(
account, account,
false, requireSecondFactor = false,
secret.getOrElse(ByteVector.empty), secret.getOrElse(ByteVector.empty),
tokenValidity.some tokenValidity.some
) )

View File

@ -194,7 +194,14 @@ object OCollective {
id <- Ident.randomId[F] id <- Ident.randomId[F]
settings = sett.emptyTrash.getOrElse(EmptyTrash.default) settings = sett.emptyTrash.getOrElse(EmptyTrash.default)
args = EmptyTrashArgs(cid, settings.minAge) args = EmptyTrashArgs(cid, settings.minAge)
ut = UserTask(id, EmptyTrashArgs.taskName, true, settings.schedule, None, args) ut = UserTask(
id,
EmptyTrashArgs.taskName,
enabled = true,
settings.schedule,
None,
args
)
_ <- uts.updateOneTask(UserTaskScope.collective(cid), args.makeSubject.some, ut) _ <- uts.updateOneTask(UserTaskScope.collective(cid), args.makeSubject.some, ut)
_ <- joex.notifyAllNodes _ <- joex.notifyAllNodes
} yield () } yield ()
@ -220,7 +227,7 @@ object OCollective {
ut = UserTask( ut = UserTask(
id, id,
LearnClassifierArgs.taskName, LearnClassifierArgs.taskName,
true, enabled = true,
CalEvent(WeekdayComponent.All, DateEvent.All, TimeEvent.All), CalEvent(WeekdayComponent.All, DateEvent.All, TimeEvent.All),
None, None,
args args
@ -239,7 +246,7 @@ object OCollective {
ut = UserTask( ut = UserTask(
id, id,
EmptyTrashArgs.taskName, EmptyTrashArgs.taskName,
true, enabled = true,
CalEvent(WeekdayComponent.All, DateEvent.All, TimeEvent.All), CalEvent(WeekdayComponent.All, DateEvent.All, TimeEvent.All),
None, None,
args args

View File

@ -114,14 +114,14 @@ object ONotification {
) )
_ <- notMod.send(logbuf._2.andThen(log), ev, ch) _ <- notMod.send(logbuf._2.andThen(log), ev, ch)
logs <- logbuf._1.get logs <- logbuf._1.get
res = SendTestResult(true, logs) res = SendTestResult(success = true, logs)
} yield res).attempt } yield res).attempt
.map { .map {
case Right(res) => res case Right(res) => res
case Left(ex) => case Left(ex) =>
val ev = val ev =
LogEvent.of(Level.Error, "Failed sending sample event").addError(ex) LogEvent.of(Level.Error, "Failed sending sample event").addError(ex)
SendTestResult(false, Vector(ev)) SendTestResult(success = false, Vector(ev))
} }
def listChannels(userId: Ident): F[Vector[Channel]] = def listChannels(userId: Ident): F[Vector[Channel]] =

View File

@ -120,7 +120,9 @@ object OTotp {
def confirmInit(accountId: AccountInfo, otp: OnetimePassword): F[ConfirmResult] = def confirmInit(accountId: AccountInfo, otp: OnetimePassword): F[ConfirmResult] =
for { for {
_ <- log.info(s"Confirm TOTP setup for account ${accountId.asString}") _ <- log.info(s"Confirm TOTP setup for account ${accountId.asString}")
key <- store.transact(RTotp.findEnabledByUserId(accountId.userId, false)) key <- store.transact(
RTotp.findEnabledByUserId(accountId.userId, enabled = false)
)
now <- Timestamp.current[F] now <- Timestamp.current[F]
res <- key match { res <- key match {
case None => case None =>
@ -129,7 +131,7 @@ object OTotp {
val check = totp.checkPassword(r.secret, otp, now.value) val check = totp.checkPassword(r.secret, otp, now.value)
if (check) if (check)
store store
.transact(RTotp.setEnabled(accountId.userId, true)) .transact(RTotp.setEnabled(accountId.userId, enabled = true))
.map(_ => ConfirmResult.Success) .map(_ => ConfirmResult.Success)
else ConfirmResult.Failed.pure[F] else ConfirmResult.Failed.pure[F]
} }
@ -140,7 +142,7 @@ object OTotp {
case Some(pw) => case Some(pw) =>
for { for {
_ <- log.info(s"Validating TOTP, because it is requested to disable it.") _ <- log.info(s"Validating TOTP, because it is requested to disable it.")
key <- store.transact(RTotp.findEnabledByLogin(accountId, true)) key <- store.transact(RTotp.findEnabledByLogin(accountId, enabled = true))
now <- Timestamp.current[F] now <- Timestamp.current[F]
res <- key match { res <- key match {
case None => case None =>
@ -149,7 +151,7 @@ object OTotp {
val check = totp.checkPassword(r.secret, pw, now.value) val check = totp.checkPassword(r.secret, pw, now.value)
if (check) if (check)
UpdateResult.fromUpdate( UpdateResult.fromUpdate(
store.transact(RTotp.setEnabled(r.userId, false)) store.transact(RTotp.setEnabled(r.userId, enabled = false))
) )
else else
log.info(s"TOTP code was invalid. Not disabling it.") *> UpdateResult log.info(s"TOTP code was invalid. Not disabling it.") *> UpdateResult
@ -160,15 +162,15 @@ object OTotp {
case None => case None =>
UpdateResult.fromUpdate { UpdateResult.fromUpdate {
(for { (for {
key <- OptionT(RTotp.findEnabledByLogin(accountId, true)) key <- OptionT(RTotp.findEnabledByLogin(accountId, enabled = true))
n <- OptionT.liftF(RTotp.setEnabled(key.userId, false)) n <- OptionT.liftF(RTotp.setEnabled(key.userId, enabled = false))
} yield n).mapK(store.transform).getOrElse(0) } yield n).mapK(store.transform).getOrElse(0)
} }
} }
def state(acc: AccountInfo): F[OtpState] = def state(acc: AccountInfo): F[OtpState] =
for { for {
record <- store.transact(RTotp.findEnabledByUserId(acc.userId, true)) record <- store.transact(RTotp.findEnabledByUserId(acc.userId, enabled = true))
result = record match { result = record match {
case Some(r) => case Some(r) =>
OtpState.Enabled(r.created) OtpState.Enabled(r.created)

View File

@ -159,7 +159,7 @@ object OUpload {
data.meta.skipDuplicates, data.meta.skipDuplicates,
data.meta.fileFilter.some, data.meta.fileFilter.some,
data.meta.tags.some, data.meta.tags.some,
false, reprocess = false,
data.meta.attachmentsOnly, data.meta.attachmentsOnly,
data.meta.customData data.meta.customData
) )

View File

@ -32,9 +32,12 @@ class AuthTokenTest extends CatsEffectSuite {
val otherSecret = ByteVector.fromValidHex("16bad") val otherSecret = ByteVector.fromValidHex("16bad")
test("validate") { test("validate") {
val token1 = AuthToken.user[IO](user, false, secret, None).unsafeRunSync() val token1 =
AuthToken.user[IO](user, requireSecondFactor = false, secret, None).unsafeRunSync()
val token2 = val token2 =
AuthToken.user[IO](user, false, secret, Duration.seconds(10).some).unsafeRunSync() AuthToken
.user[IO](user, requireSecondFactor = false, secret, Duration.seconds(10).some)
.unsafeRunSync()
assert(token1.validate(secret, Duration.seconds(5))) assert(token1.validate(secret, Duration.seconds(5)))
assert(!token1.validate(otherSecret, Duration.seconds(5))) assert(!token1.validate(otherSecret, Duration.seconds(5)))
assert(!token1.copy(account = john).validate(secret, Duration.seconds(5))) assert(!token1.copy(account = john).validate(secret, Duration.seconds(5)))
@ -46,9 +49,12 @@ class AuthTokenTest extends CatsEffectSuite {
} }
test("signature") { test("signature") {
val token1 = AuthToken.user[IO](user, false, secret, None).unsafeRunSync() val token1 =
AuthToken.user[IO](user, requireSecondFactor = false, secret, None).unsafeRunSync()
val token2 = val token2 =
AuthToken.user[IO](user, false, secret, Duration.seconds(10).some).unsafeRunSync() AuthToken
.user[IO](user, requireSecondFactor = false, secret, Duration.seconds(10).some)
.unsafeRunSync()
assert(token1.sigValid(secret)) assert(token1.sigValid(secret))
assert(token1.sigInvalid(otherSecret)) assert(token1.sigInvalid(otherSecret))

View File

@ -123,6 +123,11 @@ object Language {
val iso3 = "jpn" val iso3 = "jpn"
} }
/*It's not an ISO value, but this needs to be unique and tesseract will need jpn_vert for it's scan from the config of /etc/docspell-joex/docspell-joex.conf.*/
case object JpnVert extends Language {
val iso2 = "ja_vert"
val iso3 = "jpn_vert"
}
case object Hebrew extends Language { case object Hebrew extends Language {
val iso2 = "he" val iso2 = "he"
val iso3 = "heb" val iso3 = "heb"
@ -172,6 +177,7 @@ object Language {
Romanian, Romanian,
Latvian, Latvian,
Japanese, Japanese,
JpnVert,
Hebrew, Hebrew,
Lithuanian, Lithuanian,
Polish, Polish,

View File

@ -78,7 +78,11 @@ case class LenientUri(
.covary[F] .covary[F]
.rethrow .rethrow
.flatMap(url => .flatMap(url =>
fs2.io.readInputStream(Sync[F].delay(url.openStream()), chunkSize, true) fs2.io.readInputStream(
Sync[F].delay(url.openStream()),
chunkSize,
closeAfterUse = true
)
) )
def readText[F[_]: Sync](chunkSize: Int): F[String] = def readText[F[_]: Sync](chunkSize: Int): F[String] =
@ -121,7 +125,7 @@ object LenientUri {
val isRoot = true val isRoot = true
val isEmpty = false val isEmpty = false
def /(seg: String): Path = def /(seg: String): Path =
NonEmptyPath(NonEmptyList.of(seg), false) NonEmptyPath(NonEmptyList.of(seg), trailingSlash = false)
def asString = "/" def asString = "/"
} }
case object EmptyPath extends Path { case object EmptyPath extends Path {
@ -129,7 +133,7 @@ object LenientUri {
val isRoot = false val isRoot = false
val isEmpty = true val isEmpty = true
def /(seg: String): Path = def /(seg: String): Path =
NonEmptyPath(NonEmptyList.of(seg), false) NonEmptyPath(NonEmptyList.of(seg), trailingSlash = false)
def asString = "" def asString = ""
} }
case class NonEmptyPath(segs: NonEmptyList[String], trailingSlash: Boolean) case class NonEmptyPath(segs: NonEmptyList[String], trailingSlash: Boolean)

View File

@ -194,7 +194,7 @@ object MimeType {
val csValueStart = in.substring(n + "charset=".length).trim val csValueStart = in.substring(n + "charset=".length).trim
val csName = csValueStart.indexOf(';') match { val csName = csValueStart.indexOf(';') match {
case -1 => unquote(csValueStart).trim case -1 => unquote(csValueStart).trim
case n => unquote(csValueStart.substring(0, n)).trim case n2 => unquote(csValueStart.substring(0, n2)).trim
} }
if (Charset.isSupported(csName)) Right((Some(Charset.forName(csName)), "")) if (Charset.isSupported(csName)) Right((Some(Charset.forName(csName)), ""))
else Right((None, "")) else Right((None, ""))

View File

@ -1,212 +0,0 @@
/*
* Copyright 2020 Eike K. & Contributors
*
* SPDX-License-Identifier: AGPL-3.0-or-later
*/
package docspell.common
import java.io.InputStream
import java.lang.ProcessBuilder.Redirect
import java.util.concurrent.TimeUnit
import scala.jdk.CollectionConverters._
import cats.effect._
import cats.implicits._
import fs2.io.file.Path
import fs2.{Stream, io, text}
import docspell.common.{exec => newExec}
import docspell.logging.Logger
// better use `SysCmd` and `SysExec`
object SystemCommand {
final case class Config(
program: String,
args: Seq[String],
timeout: Duration,
env: Map[String, String] = Map.empty
) {
def toSysCmd = newExec
.SysCmd(program, newExec.Args(args))
.withTimeout(timeout)
.addEnv(newExec.Env(env))
def mapArgs(f: String => String): Config =
Config(program, args.map(f), timeout)
def replace(repl: Map[String, String]): Config =
mapArgs(s =>
repl.foldLeft(s) { case (res, (k, v)) =>
res.replace(k, v)
}
)
def withEnv(key: String, value: String): Config =
copy(env = env.updated(key, value))
def addEnv(moreEnv: Map[String, String]): Config =
copy(env = env ++ moreEnv)
def appendArgs(extraArgs: Args): Config =
copy(args = args ++ extraArgs.args)
def appendArgs(extraArgs: Seq[String]): Config =
copy(args = args ++ extraArgs)
def toCmd: List[String] =
program :: args.toList
lazy val cmdString: String =
toCmd.mkString(" ")
}
final case class Args(args: Vector[String]) extends Iterable[String] {
override def iterator = args.iterator
def prepend(a: String): Args = Args(a +: args)
def prependWhen(flag: Boolean)(a: String): Args =
prependOption(Option.when(flag)(a))
def prependOption(value: Option[String]): Args =
value.map(prepend).getOrElse(this)
def append(a: String, as: String*): Args =
Args(args ++ (a +: as.toVector))
def appendOption(value: Option[String]): Args =
value.map(append(_)).getOrElse(this)
def appendOptionVal(first: String, second: Option[String]): Args =
second.map(b => append(first, b)).getOrElse(this)
def appendWhen(flag: Boolean)(a: String, as: String*): Args =
if (flag) append(a, as: _*) else this
def appendWhenNot(flag: Boolean)(a: String, as: String*): Args =
if (!flag) append(a, as: _*) else this
def append(p: Path): Args =
append(p.toString)
def append(as: Iterable[String]): Args =
Args(args ++ as.toVector)
}
object Args {
val empty: Args = Args()
def apply(as: String*): Args =
Args(as.toVector)
}
final case class Result(rc: Int, stdout: String, stderr: String)
def exec[F[_]: Sync](
cmd: Config,
logger: Logger[F],
wd: Option[Path] = None,
stdin: Stream[F, Byte] = Stream.empty
): Stream[F, Result] =
startProcess(cmd, wd, logger, stdin) { proc =>
Stream.eval {
for {
_ <- writeToProcess(stdin, proc)
term <- Sync[F].blocking(proc.waitFor(cmd.timeout.seconds, TimeUnit.SECONDS))
_ <-
if (term)
logger.debug(s"Command `${cmd.cmdString}` finished: ${proc.exitValue}")
else
logger.warn(
s"Command `${cmd.cmdString}` did not finish in ${cmd.timeout.formatExact}!"
)
_ <- if (!term) timeoutError(proc, cmd) else Sync[F].pure(())
out <-
if (term) inputStreamToString(proc.getInputStream)
else Sync[F].pure("")
err <-
if (term) inputStreamToString(proc.getErrorStream)
else Sync[F].pure("")
} yield Result(proc.exitValue, out, err)
}
}
def execSuccess[F[_]: Sync](
cmd: Config,
logger: Logger[F],
wd: Option[Path] = None,
stdin: Stream[F, Byte] = Stream.empty
): Stream[F, Result] =
exec(cmd, logger, wd, stdin).flatMap { r =>
if (r.rc != 0)
Stream.raiseError[F](
new Exception(
s"Command `${cmd.cmdString}` returned non-zero exit code ${r.rc}. Stderr: ${r.stderr}"
)
)
else Stream.emit(r)
}
private def startProcess[F[_]: Sync, A](
cmd: Config,
wd: Option[Path],
logger: Logger[F],
stdin: Stream[F, Byte]
)(
f: Process => Stream[F, A]
): Stream[F, A] = {
val log = logger.debug(s"Running external command: ${cmd.cmdString}")
val hasStdin = stdin.take(1).compile.last.map(_.isDefined)
val proc = log *> hasStdin.flatMap(flag =>
Sync[F].blocking {
val pb = new ProcessBuilder(cmd.toCmd.asJava)
.redirectInput(if (flag) Redirect.PIPE else Redirect.INHERIT)
.redirectError(Redirect.PIPE)
.redirectOutput(Redirect.PIPE)
val pbEnv = pb.environment()
cmd.env.foreach { case (key, value) =>
pbEnv.put(key, value)
}
wd.map(_.toNioPath.toFile).foreach(pb.directory)
pb.start()
}
)
Stream
.bracket(proc)(p =>
logger.debug(s"Closing process: `${cmd.cmdString}`").map(_ => p.destroy())
)
.flatMap(f)
}
private def inputStreamToString[F[_]: Sync](in: InputStream): F[String] =
io.readInputStream(Sync[F].pure(in), 16 * 1024, closeAfterUse = false)
.through(text.utf8.decode)
.chunks
.map(_.toVector.mkString)
.fold1(_ + _)
.compile
.last
.map(_.getOrElse(""))
private def writeToProcess[F[_]: Sync](
data: Stream[F, Byte],
proc: Process
): F[Unit] =
data
.through(io.writeOutputStream(Sync[F].blocking(proc.getOutputStream)))
.compile
.drain
private def timeoutError[F[_]: Sync](proc: Process, cmd: Config): F[Unit] =
Sync[F].blocking(proc.destroyForcibly()).attempt *> {
Sync[F].raiseError(
new Exception(
s"Command `${cmd.cmdString}` timed out (${cmd.timeout.formatExact})"
)
)
}
}

View File

@ -62,7 +62,7 @@ object UrlMatcher {
// strip path to only match prefixes // strip path to only match prefixes
val mPath: LenientUri.Path = val mPath: LenientUri.Path =
NonEmptyList.fromList(url.path.segments.take(pathSegmentCount)) match { NonEmptyList.fromList(url.path.segments.take(pathSegmentCount)) match {
case Some(nel) => LenientUri.NonEmptyPath(nel, false) case Some(nel) => LenientUri.NonEmptyPath(nel, trailingSlash = false)
case None => LenientUri.RootPath case None => LenientUri.RootPath
} }

View File

@ -17,6 +17,9 @@ case class Env(values: Map[String, String]) {
def addAll(e: Env): Env = def addAll(e: Env): Env =
Env(values ++ e.values) Env(values ++ e.values)
def modifyValue(f: String => String): Env =
Env(values.view.mapValues(f).toMap)
def ++(e: Env) = addAll(e) def ++(e: Env) = addAll(e)
def foreach(f: (String, String) => Unit): Unit = def foreach(f: (String, String) => Unit): Unit =

View File

@ -0,0 +1,89 @@
/*
* Copyright 2020 Eike K. & Contributors
*
* SPDX-License-Identifier: AGPL-3.0-or-later
*/
package docspell.common.exec
import docspell.common.Duration
import docspell.common.Ident
import docspell.common.exec.Env
import docspell.common.exec.ExternalCommand.ArgMapping
import docspell.common.exec.SysCmd
final case class ExternalCommand(
program: String,
args: Seq[String],
timeout: Duration,
env: Map[String, String] = Map.empty,
argMappings: Map[Ident, ArgMapping] = Map.empty
) {
def withVars(vars: Map[String, String]): ExternalCommand.WithVars =
ExternalCommand.WithVars(this, vars)
import ExternalCommand.pattern
def resolve(vars: Map[String, String]): SysCmd = {
val replace = ExternalCommand.replaceString(vars) _
val resolvedArgMappings =
argMappings.view.mapValues(_.resolve(replace).firstMatch).toMap
val resolvedArgs = args.map(replace).flatMap { arg =>
resolvedArgMappings
.find(e => pattern(e._1.id) == arg)
.map(_._2)
.getOrElse(List(arg))
}
SysCmd(replace(program), resolvedArgs: _*)
.withTimeout(timeout)
.withEnv(_ => Env(env).modifyValue(replace))
}
}
object ExternalCommand {
private val openPattern = "{{"
private val closePattern = "}}"
private def pattern(s: String): String = s"${openPattern}${s}${closePattern}"
def apply(program: String, args: Seq[String], timeout: Duration): ExternalCommand =
ExternalCommand(program, args, timeout, Map.empty, Map.empty)
final case class ArgMapping(
value: String,
mappings: List[ArgMatch]
) {
private[exec] def resolve(replace: String => String): ArgMapping =
ArgMapping(replace(value), mappings.map(_.resolve(replace)))
def firstMatch: List[String] =
mappings.find(am => value.matches(am.matches)).map(_.args).getOrElse(Nil)
}
final case class ArgMatch(
matches: String,
args: List[String]
) {
private[exec] def resolve(replace: String => String): ArgMatch =
ArgMatch(replace(matches), args.map(replace))
}
private def replaceString(vars: Map[String, String])(in: String): String =
vars.foldLeft(in) { case (result, (name, value)) =>
val key = s"{{$name}}"
result.replace(key, value)
}
final case class WithVars(cmd: ExternalCommand, vars: Map[String, String]) {
def resolved: SysCmd = cmd.resolve(vars)
def append(more: (String, String)*): WithVars =
WithVars(cmd, vars ++ more.toMap)
def withVar(key: String, value: String): WithVars =
WithVars(cmd, vars.updated(key, value))
def withVarOption(key: String, value: Option[String]): WithVars =
value.map(withVar(key, _)).getOrElse(this)
}
}

View File

@ -38,6 +38,20 @@ trait SysExec[F[_]] {
def waitFor(timeout: Option[Duration] = None): F[Int] def waitFor(timeout: Option[Duration] = None): F[Int]
/** Uses `waitFor` and throws when return code is non-zero. Logs stderr and stdout while
* waiting.
*/
def runToSuccess(logger: Logger[F], timeout: Option[Duration] = None)(implicit
F: Async[F]
): F[Int]
/** Uses `waitFor` and throws when return code is non-zero. Logs stderr while waiting
* and collects stdout once finished successfully.
*/
def runToSuccessStdout(logger: Logger[F], timeout: Option[Duration] = None)(implicit
F: Async[F]
): F[String]
/** Sends a signal to the process to terminate it immediately */ /** Sends a signal to the process to terminate it immediately */
def cancel: F[Unit] def cancel: F[Unit]
@ -75,6 +89,12 @@ object SysExec {
proc <- startProcess(logger, cmd, workdir, stdin) proc <- startProcess(logger, cmd, workdir, stdin)
fibers <- Resource.eval(Ref.of[F, List[F[Unit]]](Nil)) fibers <- Resource.eval(Ref.of[F, List[F[Unit]]](Nil))
} yield new SysExec[F] { } yield new SysExec[F] {
private lazy val basicName: String =
cmd.program.lastIndexOf(java.io.File.separatorChar.toInt) match {
case n if n > 0 => cmd.program.drop(n + 1)
case _ => cmd.program.takeRight(16)
}
def stdout: Stream[F, Byte] = def stdout: Stream[F, Byte] =
fs2.io.readInputStream( fs2.io.readInputStream(
Sync[F].blocking(proc.getInputStream), Sync[F].blocking(proc.getInputStream),
@ -107,6 +127,39 @@ object SysExec {
) )
} }
def runToSuccess(logger: Logger[F], timeout: Option[Duration])(implicit
F: Async[F]
): F[Int] =
logOutputs(logger, basicName).use(_.waitFor(timeout).flatMap {
case rc if rc == 0 => Sync[F].pure(0)
case rc =>
Sync[F].raiseError(
new Exception(s"Command `${cmd.program}` returned non-zero exit code ${rc}")
)
})
def runToSuccessStdout(logger: Logger[F], timeout: Option[Duration])(implicit
F: Async[F]
): F[String] =
F.background(
stderrLines
.through(line => Stream.eval(logger.debug(s"[$basicName (err)]: $line")))
.compile
.drain
).use { f1 =>
waitFor(timeout)
.flatMap {
case rc if rc == 0 => stdout.through(fs2.text.utf8.decode).compile.string
case rc =>
Sync[F].raiseError[String](
new Exception(
s"Command `${cmd.program}` returned non-zero exit code ${rc}"
)
)
}
.flatTap(_ => f1)
}
def consumeOutputs(out: Pipe[F, String, Unit], err: Pipe[F, String, Unit])(implicit def consumeOutputs(out: Pipe[F, String, Unit], err: Pipe[F, String, Unit])(implicit
F: Async[F] F: Async[F]
): Resource[F, SysExec[F]] = ): Resource[F, SysExec[F]] =

View File

@ -6,6 +6,7 @@
package docspell.common.util package docspell.common.util
import cats.data.OptionT
import cats.effect._ import cats.effect._
import cats.syntax.all._ import cats.syntax.all._
import cats.{Applicative, Monad} import cats.{Applicative, Monad}
@ -26,10 +27,10 @@ object Directory {
(dir :: dirs.toList).traverse_(Files[F].createDirectories(_)) (dir :: dirs.toList).traverse_(Files[F].createDirectories(_))
def nonEmpty[F[_]: Files: Sync](dir: Path): F[Boolean] = def nonEmpty[F[_]: Files: Sync](dir: Path): F[Boolean] =
List( OptionT
Files[F].isDirectory(dir), .whenM(Files[F].isDirectory(dir))(Files[F].list(dir).take(1).compile.toList)
Files[F].list(dir).take(1).compile.last.map(_.isDefined) .map(_.nonEmpty)
).sequence.map(_.forall(identity)) .isDefined
def isEmpty[F[_]: Files: Sync](dir: Path): F[Boolean] = def isEmpty[F[_]: Files: Sync](dir: Path): F[Boolean] =
nonEmpty(dir).map(b => !b) nonEmpty(dir).map(b => !b)

View File

@ -0,0 +1,74 @@
/*
* Copyright 2020 Eike K. & Contributors
*
* SPDX-License-Identifier: AGPL-3.0-or-later
*/
package docspell.common.exec
import docspell.common.Duration
import docspell.common.Ident
import docspell.common.exec.Args
import docspell.common.exec.Env
import docspell.common.exec.ExternalCommand._
import docspell.common.exec.SysCmd
import munit.FunSuite
class ExternalCommandTest extends FunSuite {
test("resolve") {
val cmd = ExternalCommand(
program = "tesseract",
args = "{{infile}}" :: "{{lang-spec}}" :: "out" :: "pdf" :: "txt" :: Nil,
timeout = Duration.minutes(5),
env = Map.empty,
argMappings = Map(
Ident.unsafe("lang-spec") -> ArgMapping(
value = "{{lang}}",
mappings = List(
ArgMatch(
matches = "jpn_vert",
args = List("-l", "jpn_vert", "-c", "preserve_interword_spaces=1")
),
ArgMatch(
matches = ".*",
args = List("-l", "{{lang}}")
)
)
)
)
)
val varsDe = Map("lang" -> "de", "encoding" -> "UTF_8", "infile" -> "text.jpg")
assertEquals(
cmd.resolve(varsDe),
SysCmd(
"tesseract",
Args.of("text.jpg", "-l", "de", "out", "pdf", "txt"),
Env.empty,
Duration.minutes(5)
)
)
val varsJpnVert = varsDe.updated("lang", "jpn_vert")
assertEquals(
cmd.resolve(varsJpnVert),
SysCmd(
"tesseract",
Args.of(
"text.jpg",
"-l",
"jpn_vert",
"-c",
"preserve_interword_spaces=1",
"out",
"pdf",
"txt"
),
Env.empty,
Duration.minutes(5)
)
)
}
}

View File

@ -16,7 +16,7 @@ import munit.CatsEffectSuite
class DirectoryTest extends CatsEffectSuite with TestLoggingConfig { class DirectoryTest extends CatsEffectSuite with TestLoggingConfig {
val logger = docspell.logging.getLogger[IO] val logger = docspell.logging.getLogger[IO]
val tempDir = ResourceFixture( val tempDir = ResourceFunFixture(
Files[IO].tempDirectory(Path("target").some, "directory-test-", None) Files[IO].tempDirectory(Path("target").some, "directory-test-", None)
) )

View File

@ -11,7 +11,8 @@ import cats.implicits._
import fs2.io.file.{Files, Path} import fs2.io.file.{Files, Path}
import fs2.{Pipe, Stream} import fs2.{Pipe, Stream}
import docspell.common._ import docspell.common.exec.ExternalCommand
import docspell.common.exec.SysExec
import docspell.common.util.File import docspell.common.util.File
import docspell.convert.ConversionResult import docspell.convert.ConversionResult
import docspell.convert.ConversionResult.{Handler, successPdf, successPdfTxt} import docspell.convert.ConversionResult.{Handler, successPdf, successPdfTxt}
@ -21,11 +22,11 @@ private[extern] object ExternConv {
def toPDF[F[_]: Async: Files, A]( def toPDF[F[_]: Async: Files, A](
name: String, name: String,
cmdCfg: SystemCommand.Config, cmdCfg: ExternalCommand.WithVars,
wd: Path, wd: Path,
useStdin: Boolean, useStdin: Boolean,
logger: Logger[F], logger: Logger[F],
reader: (Path, SystemCommand.Result) => F[ConversionResult[F]] reader: (Path, Int) => F[ConversionResult[F]]
)(in: Stream[F, Byte], handler: Handler[F, A]): F[A] = )(in: Stream[F, Byte], handler: Handler[F, A]): F[A] =
Stream Stream
.resource(File.withTempDir[F](wd, s"docspell-$name")) .resource(File.withTempDir[F](wd, s"docspell-$name"))
@ -33,32 +34,21 @@ private[extern] object ExternConv {
val inFile = dir.resolve("infile").absolute.normalize val inFile = dir.resolve("infile").absolute.normalize
val out = dir.resolve("out.pdf").absolute.normalize val out = dir.resolve("out.pdf").absolute.normalize
val sysCfg = val sysCfg =
cmdCfg.replace( cmdCfg
Map( .withVar("outfile", out.toString)
"{{outfile}}" -> out.toString .withVarOption("infile", Option.when(!useStdin)(inFile.toString))
) ++ .resolved
(if (!useStdin) Map("{{infile}}" -> inFile.toString)
else Map.empty)
)
val createInput: Pipe[F, Byte, Unit] = val createInput: Pipe[F, Byte, Unit] =
if (useStdin) _ => Stream.emit(()) if (useStdin) _ => Stream.emit(())
else storeDataToFile(name, logger, inFile) else storeDataToFile(name, logger, inFile)
in.through(createInput).flatMap { _ => in.through(createInput).evalMap { _ =>
SystemCommand SysExec(sysCfg, logger, Some(dir), Option.when(useStdin)(in))
.exec[F]( .flatMap(_.logOutputs(logger, name))
sysCfg, .use { proc =>
logger, proc.waitFor().flatMap(rc => reader(out, rc).flatMap(handler.run))
Some(dir), }
if (useStdin) in
else Stream.empty
)
.evalMap(result =>
logResult(name, result, logger)
.flatMap(_ => reader(out, result))
.flatMap(handler.run)
)
} }
} }
.compile .compile
@ -74,9 +64,9 @@ private[extern] object ExternConv {
def readResult[F[_]: Async: Files]( def readResult[F[_]: Async: Files](
chunkSize: Int, chunkSize: Int,
logger: Logger[F] logger: Logger[F]
)(out: Path, result: SystemCommand.Result): F[ConversionResult[F]] = )(out: Path, result: Int): F[ConversionResult[F]] =
File.existsNonEmpty[F](out).flatMap { File.existsNonEmpty[F](out).flatMap {
case true if result.rc == 0 => case true if result == 0 =>
val outTxt = out.resolveSibling(out.fileName.toString + ".txt") val outTxt = out.resolveSibling(out.fileName.toString + ".txt")
File.existsNonEmpty[F](outTxt).flatMap { File.existsNonEmpty[F](outTxt).flatMap {
case true => case true =>
@ -88,13 +78,13 @@ private[extern] object ExternConv {
successPdf(File.readAll(out, chunkSize)).pure[F] successPdf(File.readAll(out, chunkSize)).pure[F]
} }
case true => case true =>
logger.warn(s"Command not successful (rc=${result.rc}), but file exists.") *> logger.warn(s"Command not successful (rc=${result}), but file exists.") *>
successPdf(File.readAll(out, chunkSize)).pure[F] successPdf(File.readAll(out, chunkSize)).pure[F]
case false => case false =>
ConversionResult ConversionResult
.failure[F]( .failure[F](
new Exception(s"Command result=${result.rc}. No output file found.") new Exception(s"Command result=${result}. No output file found.")
) )
.pure[F] .pure[F]
} }
@ -103,25 +93,25 @@ private[extern] object ExternConv {
outPrefix: String, outPrefix: String,
chunkSize: Int, chunkSize: Int,
logger: Logger[F] logger: Logger[F]
)(out: Path, result: SystemCommand.Result): F[ConversionResult[F]] = { )(out: Path, result: Int): F[ConversionResult[F]] = {
val outPdf = out.resolveSibling(s"$outPrefix.pdf") val outPdf = out.resolveSibling(s"$outPrefix.pdf")
File.existsNonEmpty[F](outPdf).flatMap { File.existsNonEmpty[F](outPdf).flatMap {
case true => case true =>
val outTxt = out.resolveSibling(s"$outPrefix.txt") val outTxt = out.resolveSibling(s"$outPrefix.txt")
File.exists(outTxt).flatMap { txtExists => File.exists(outTxt).flatMap { txtExists =>
val pdfData = File.readAll(out, chunkSize) val pdfData = File.readAll(out, chunkSize)
if (result.rc == 0) if (result == 0)
if (txtExists) successPdfTxt(pdfData, File.readText(outTxt)).pure[F] if (txtExists) successPdfTxt(pdfData, File.readText(outTxt)).pure[F]
else successPdf(pdfData).pure[F] else successPdf(pdfData).pure[F]
else else
logger.warn(s"Command not successful (rc=${result.rc}), but file exists.") *> logger.warn(s"Command not successful (rc=${result}), but file exists.") *>
successPdf(pdfData).pure[F] successPdf(pdfData).pure[F]
} }
case false => case false =>
ConversionResult ConversionResult
.failure[F]( .failure[F](
new Exception(s"Command result=${result.rc}. No output file found.") new Exception(s"Command result=${result}. No output file found.")
) )
.pure[F] .pure[F]
} }
@ -138,14 +128,6 @@ private[extern] object ExternConv {
.drain ++ .drain ++
Stream.eval(storeFile(in, inFile)) Stream.eval(storeFile(in, inFile))
private def logResult[F[_]: Sync](
name: String,
result: SystemCommand.Result,
logger: Logger[F]
): F[Unit] =
logger.debug(s"$name stdout: ${result.stdout}") *>
logger.debug(s"$name stderr: ${result.stderr}")
private def storeFile[F[_]: Async: Files]( private def storeFile[F[_]: Async: Files](
in: Stream[F, Byte], in: Stream[F, Byte],
target: Path target: Path

View File

@ -24,14 +24,16 @@ object OcrMyPdf {
logger: Logger[F] logger: Logger[F]
)(in: Stream[F, Byte], handler: Handler[F, A]): F[A] = )(in: Stream[F, Byte], handler: Handler[F, A]): F[A] =
if (cfg.enabled) { if (cfg.enabled) {
val reader: (Path, SystemCommand.Result) => F[ConversionResult[F]] = val reader: (Path, Int) => F[ConversionResult[F]] =
ExternConv.readResult[F](chunkSize, logger) ExternConv.readResult[F](chunkSize, logger)
val cmd = cfg.command.withVars(Map("lang" -> lang.iso3))
ExternConv.toPDF[F, A]( ExternConv.toPDF[F, A](
"ocrmypdf", "ocrmypdf",
cfg.command.replace(Map("{{lang}}" -> lang.iso3)), cmd,
cfg.workingDir, cfg.workingDir,
false, useStdin = false,
logger, logger,
reader reader
)(in, handler) )(in, handler)

View File

@ -8,10 +8,10 @@ package docspell.convert.extern
import fs2.io.file.Path import fs2.io.file.Path
import docspell.common.SystemCommand import docspell.common.exec.ExternalCommand
case class OcrMyPdfConfig( case class OcrMyPdfConfig(
enabled: Boolean, enabled: Boolean,
command: SystemCommand.Config, command: ExternalCommand,
workingDir: Path workingDir: Path
) )

View File

@ -24,17 +24,18 @@ object Tesseract {
logger: Logger[F] logger: Logger[F]
)(in: Stream[F, Byte], handler: Handler[F, A]): F[A] = { )(in: Stream[F, Byte], handler: Handler[F, A]): F[A] = {
val outBase = cfg.command.args.tail.headOption.getOrElse("out") val outBase = cfg.command.args.tail.headOption.getOrElse("out")
val reader: (Path, SystemCommand.Result) => F[ConversionResult[F]] = val reader: (Path, Int) => F[ConversionResult[F]] =
ExternConv.readResultTesseract[F](outBase, chunkSize, logger) ExternConv.readResultTesseract[F](outBase, chunkSize, logger)
val cmd = cfg.command.withVars(Map("lang" -> lang.iso3))
ExternConv.toPDF[F, A]( ExternConv.toPDF[F, A](
"tesseract", "tesseract",
cfg.command.replace(Map("{{lang}}" -> lang.iso3)), cmd,
cfg.workingDir, cfg.workingDir,
false, useStdin = false,
logger, logger,
reader reader
)(in, handler) )(in, handler)
} }
} }

View File

@ -8,6 +8,6 @@ package docspell.convert.extern
import fs2.io.file.Path import fs2.io.file.Path
import docspell.common.SystemCommand import docspell.common.exec.ExternalCommand
case class TesseractConfig(command: SystemCommand.Config, workingDir: Path) case class TesseractConfig(command: ExternalCommand, workingDir: Path)

View File

@ -10,7 +10,6 @@ import cats.effect._
import fs2.Stream import fs2.Stream
import fs2.io.file.{Files, Path} import fs2.io.file.{Files, Path}
import docspell.common._
import docspell.convert.ConversionResult import docspell.convert.ConversionResult
import docspell.convert.ConversionResult.Handler import docspell.convert.ConversionResult.Handler
import docspell.logging.Logger import docspell.logging.Logger
@ -22,14 +21,15 @@ object Unoconv {
chunkSize: Int, chunkSize: Int,
logger: Logger[F] logger: Logger[F]
)(in: Stream[F, Byte], handler: Handler[F, A]): F[A] = { )(in: Stream[F, Byte], handler: Handler[F, A]): F[A] = {
val reader: (Path, SystemCommand.Result) => F[ConversionResult[F]] = val reader: (Path, Int) => F[ConversionResult[F]] =
ExternConv.readResult[F](chunkSize, logger) ExternConv.readResult[F](chunkSize, logger)
val cmd = cfg.command.withVars(Map.empty)
ExternConv.toPDF[F, A]( ExternConv.toPDF[F, A](
"unoconv", "unoconv",
cfg.command, cmd,
cfg.workingDir, cfg.workingDir,
false, useStdin = false,
logger, logger,
reader reader
)( )(
@ -37,5 +37,4 @@ object Unoconv {
handler handler
) )
} }
} }

View File

@ -8,6 +8,6 @@ package docspell.convert.extern
import fs2.io.file.Path import fs2.io.file.Path
import docspell.common.SystemCommand import docspell.common.exec.ExternalCommand
case class UnoconvConfig(command: SystemCommand.Config, workingDir: Path) case class UnoconvConfig(command: ExternalCommand, workingDir: Path)

View File

@ -27,10 +27,10 @@ object Weasyprint {
sanitizeHtml: SanitizeHtml, sanitizeHtml: SanitizeHtml,
logger: Logger[F] logger: Logger[F]
)(in: Stream[F, Byte], handler: Handler[F, A]): F[A] = { )(in: Stream[F, Byte], handler: Handler[F, A]): F[A] = {
val reader: (Path, SystemCommand.Result) => F[ConversionResult[F]] = val reader: (Path, Int) => F[ConversionResult[F]] =
ExternConv.readResult[F](chunkSize, logger) ExternConv.readResult[F](chunkSize, logger)
val cmdCfg = cfg.command.replace(Map("{{encoding}}" -> charset.name())) val cmdCfg = cfg.command.withVars(Map("encoding" -> charset.name()))
// html sanitize should (among other) remove links to invalid // html sanitize should (among other) remove links to invalid
// protocols like cid: which is not supported by further // protocols like cid: which is not supported by further
@ -51,5 +51,4 @@ object Weasyprint {
handler handler
) )
} }
} }

View File

@ -8,6 +8,6 @@ package docspell.convert.extern
import fs2.io.file.Path import fs2.io.file.Path
import docspell.common.SystemCommand import docspell.common.exec.ExternalCommand
case class WeasyprintConfig(command: SystemCommand.Config, workingDir: Path) case class WeasyprintConfig(command: ExternalCommand, workingDir: Path)

View File

@ -27,10 +27,10 @@ object WkHtmlPdf {
sanitizeHtml: SanitizeHtml, sanitizeHtml: SanitizeHtml,
logger: Logger[F] logger: Logger[F]
)(in: Stream[F, Byte], handler: Handler[F, A]): F[A] = { )(in: Stream[F, Byte], handler: Handler[F, A]): F[A] = {
val reader: (Path, SystemCommand.Result) => F[ConversionResult[F]] = val reader: (Path, Int) => F[ConversionResult[F]] =
ExternConv.readResult[F](chunkSize, logger) ExternConv.readResult[F](chunkSize, logger)
val cmdCfg = cfg.command.replace(Map("{{encoding}}" -> charset.name())) val cmdCfg = cfg.command.withVars(Map("encoding" -> charset.name()))
// html sanitize should (among other) remove links to invalid // html sanitize should (among other) remove links to invalid
// protocols like cid: which is not supported by further // protocols like cid: which is not supported by further
@ -58,5 +58,4 @@ object WkHtmlPdf {
handler handler
) )
} }
} }

View File

@ -8,6 +8,6 @@ package docspell.convert.extern
import fs2.io.file.Path import fs2.io.file.Path
import docspell.common.SystemCommand import docspell.common.exec.ExternalCommand
case class WkHtmlPdfConfig(command: SystemCommand.Config, workingDir: Path) case class WkHtmlPdfConfig(command: ExternalCommand, workingDir: Path)

View File

@ -15,6 +15,7 @@ import cats.implicits._
import fs2.Stream import fs2.Stream
import docspell.common._ import docspell.common._
import docspell.common.exec._
import docspell.common.util.File import docspell.common.util.File
import docspell.convert.ConversionResult.Handler import docspell.convert.ConversionResult.Handler
import docspell.convert.ConvertConfig.HtmlConverter import docspell.convert.ConvertConfig.HtmlConverter
@ -36,7 +37,7 @@ class ConversionTest extends FunSuite with FileChecks with TestLoggingConfig {
3000 * 3000, 3000 * 3000,
MarkdownConfig("body { padding: 2em 5em; }"), MarkdownConfig("body { padding: 2em 5em; }"),
WkHtmlPdfConfig( WkHtmlPdfConfig(
SystemCommand.Config( ExternalCommand(
"wkhtmltopdf", "wkhtmltopdf",
Seq("-s", "A4", "--encoding", "UTF-8", "-", "{{outfile}}"), Seq("-s", "A4", "--encoding", "UTF-8", "-", "{{outfile}}"),
Duration.seconds(20) Duration.seconds(20)
@ -44,7 +45,7 @@ class ConversionTest extends FunSuite with FileChecks with TestLoggingConfig {
target target
), ),
WeasyprintConfig( WeasyprintConfig(
SystemCommand.Config( ExternalCommand(
"weasyprint", "weasyprint",
Seq("--encoding", "UTF-8", "-", "{{outfile}}"), Seq("--encoding", "UTF-8", "-", "{{outfile}}"),
Duration.seconds(20) Duration.seconds(20)
@ -53,7 +54,7 @@ class ConversionTest extends FunSuite with FileChecks with TestLoggingConfig {
), ),
HtmlConverter.Wkhtmltopdf, HtmlConverter.Wkhtmltopdf,
TesseractConfig( TesseractConfig(
SystemCommand.Config( ExternalCommand(
"tesseract", "tesseract",
Seq("{{infile}}", "out", "-l", "deu", "pdf", "txt"), Seq("{{infile}}", "out", "-l", "deu", "pdf", "txt"),
Duration.seconds(20) Duration.seconds(20)
@ -61,7 +62,7 @@ class ConversionTest extends FunSuite with FileChecks with TestLoggingConfig {
target target
), ),
UnoconvConfig( UnoconvConfig(
SystemCommand.Config( ExternalCommand(
"unoconv", "unoconv",
Seq("-f", "pdf", "-o", "{{outfile}}", "{{infile}}"), Seq("-f", "pdf", "-o", "{{outfile}}", "{{infile}}"),
Duration.seconds(20) Duration.seconds(20)
@ -69,8 +70,8 @@ class ConversionTest extends FunSuite with FileChecks with TestLoggingConfig {
target target
), ),
OcrMyPdfConfig( OcrMyPdfConfig(
true, enabled = true,
SystemCommand.Config( ExternalCommand(
"ocrmypdf", "ocrmypdf",
Seq( Seq(
"-l", "-l",
@ -86,7 +87,7 @@ class ConversionTest extends FunSuite with FileChecks with TestLoggingConfig {
), ),
target target
), ),
ConvertConfig.DecryptPdf(true, Nil) ConvertConfig.DecryptPdf(enabled = true, Nil)
) )
val conversion = val conversion =

View File

@ -14,6 +14,7 @@ import cats.effect.unsafe.implicits.global
import fs2.io.file.Path import fs2.io.file.Path
import docspell.common._ import docspell.common._
import docspell.common.exec._
import docspell.common.util.File import docspell.common.util.File
import docspell.convert._ import docspell.convert._
import docspell.files.ExampleFiles import docspell.files.ExampleFiles
@ -27,7 +28,7 @@ class ExternConvTest extends FunSuite with FileChecks with TestLoggingConfig {
val target = File.path(Paths.get("target")) val target = File.path(Paths.get("target"))
test("convert html to pdf") { test("convert html to pdf") {
val cfg = SystemCommand.Config( val cfg = ExternalCommand(
"wkhtmltopdf", "wkhtmltopdf",
Seq("-s", "A4", "--encoding", "UTF-8", "-", "{{outfile}}"), Seq("-s", "A4", "--encoding", "UTF-8", "-", "{{outfile}}"),
Duration.seconds(20) Duration.seconds(20)
@ -53,7 +54,7 @@ class ExternConvTest extends FunSuite with FileChecks with TestLoggingConfig {
} }
test("convert office to pdf") { test("convert office to pdf") {
val cfg = SystemCommand.Config( val cfg = ExternalCommand(
"unoconv", "unoconv",
Seq("-f", "pdf", "-o", "{{outfile}}", "{{infile}}"), Seq("-f", "pdf", "-o", "{{outfile}}", "{{infile}}"),
Duration.seconds(20) Duration.seconds(20)
@ -80,7 +81,7 @@ class ExternConvTest extends FunSuite with FileChecks with TestLoggingConfig {
} }
test("convert image to pdf") { test("convert image to pdf") {
val cfg = SystemCommand.Config( val cfg = ExternalCommand(
"tesseract", "tesseract",
Seq("{{infile}}", "out", "-l", "deu", "pdf", "txt"), Seq("{{infile}}", "out", "-l", "deu", "pdf", "txt"),
Duration.seconds(20) Duration.seconds(20)
@ -105,5 +106,4 @@ class ExternConvTest extends FunSuite with FileChecks with TestLoggingConfig {
) )
.unsafeRunSync() .unsafeRunSync()
} }
} }

View File

@ -10,7 +10,8 @@ import cats.effect._
import fs2.Stream import fs2.Stream
import fs2.io.file.{Files, Path} import fs2.io.file.{Files, Path}
import docspell.common._ import docspell.common.exec.ExternalCommand
import docspell.common.exec.SysExec
import docspell.common.util.File import docspell.common.util.File
import docspell.logging.Logger import docspell.logging.Logger
@ -77,14 +78,17 @@ object Ocr {
else cfg.ghostscript.command.args else cfg.ghostscript.command.args
val cmd = cfg.ghostscript.command val cmd = cfg.ghostscript.command
.copy(args = xargs) .copy(args = xargs)
.replace( .withVars(
Map( Map(
"{{infile}}" -> "-", "infile" -> "-",
"{{outfile}}" -> "%d.tif" "outfile" -> "%d.tif"
) )
) )
SystemCommand .resolved
.execSuccess(cmd, logger, wd = Some(wd), stdin = pdf)
Stream
.resource(SysExec(cmd, logger, Some(wd), Some(pdf)))
.evalMap(_.runToSuccess(logger))
.flatMap(_ => File.listFiles(pathEndsWith(".tif"), wd)) .flatMap(_ => File.listFiles(pathEndsWith(".tif"), wd))
} }
@ -93,18 +97,22 @@ object Ocr {
*/ */
private[extract] def runGhostscriptFile[F[_]: Async: Files]( private[extract] def runGhostscriptFile[F[_]: Async: Files](
pdf: Path, pdf: Path,
ghostscript: SystemCommand.Config, ghostscript: ExternalCommand,
wd: Path, wd: Path,
logger: Logger[F] logger: Logger[F]
): Stream[F, Path] = { ): Stream[F, Path] = {
val cmd = ghostscript.replace( val cmd = ghostscript
Map( .withVars(
"{{infile}}" -> pdf.absolute.toString, Map(
"{{outfile}}" -> "%d.tif" "infile" -> pdf.absolute.toString,
"outfile" -> "%d.tif"
)
) )
) .resolved
SystemCommand
.execSuccess[F](cmd, logger, wd = Some(wd)) Stream
.resource(SysExec(cmd, logger, Some(wd)))
.evalMap(_.runToSuccess(logger))
.flatMap(_ => File.listFiles(pathEndsWith(".tif"), wd)) .flatMap(_ => File.listFiles(pathEndsWith(".tif"), wd))
} }
@ -116,19 +124,23 @@ object Ocr {
*/ */
private[extract] def runUnpaperFile[F[_]: Async]( private[extract] def runUnpaperFile[F[_]: Async](
img: Path, img: Path,
unpaper: SystemCommand.Config, unpaper: ExternalCommand,
wd: Option[Path], wd: Option[Path],
logger: Logger[F] logger: Logger[F]
): Stream[F, Path] = { ): Stream[F, Path] = {
val targetFile = img.resolveSibling("u-" + img.fileName.toString).absolute val targetFile = img.resolveSibling("u-" + img.fileName.toString).absolute
val cmd = unpaper.replace( val cmd = unpaper
Map( .withVars(
"{{infile}}" -> img.absolute.toString, Map(
"{{outfile}}" -> targetFile.toString "infile" -> img.absolute.toString,
"outfile" -> targetFile.toString
)
) )
) .resolved
SystemCommand
.execSuccess[F](cmd, logger, wd = wd) Stream
.resource(SysExec(cmd, logger, wd))
.evalMap(_.runToSuccess(logger))
.map(_ => targetFile) .map(_ => targetFile)
.handleErrorWith { th => .handleErrorWith { th =>
logger logger
@ -150,12 +162,14 @@ object Ocr {
// so use the parent as working dir // so use the parent as working dir
runUnpaperFile(img, config.unpaper.command, img.parent, logger).flatMap { uimg => runUnpaperFile(img, config.unpaper.command, img.parent, logger).flatMap { uimg =>
val cmd = config.tesseract.command val cmd = config.tesseract.command
.replace( .withVars(
Map("{{file}}" -> uimg.fileName.toString, "{{lang}}" -> fixLanguage(lang)) Map("file" -> uimg.fileName.toString, "lang" -> fixLanguage(lang))
) )
SystemCommand .resolved
.execSuccess[F](cmd, logger, wd = uimg.parent)
.map(_.stdout) Stream
.resource(SysExec(cmd, logger, uimg.parent))
.evalMap(_.runToSuccessStdout(logger))
} }
/** Run tesseract on the given image file and return the extracted text. */ /** Run tesseract on the given image file and return the extracted text. */
@ -166,8 +180,12 @@ object Ocr {
config: OcrConfig config: OcrConfig
): Stream[F, String] = { ): Stream[F, String] = {
val cmd = config.tesseract.command val cmd = config.tesseract.command
.replace(Map("{{file}}" -> "stdin", "{{lang}}" -> fixLanguage(lang))) .withVars(Map("file" -> "stdin", "lang" -> fixLanguage(lang)))
SystemCommand.execSuccess(cmd, logger, stdin = img).map(_.stdout) .resolved
Stream
.resource(SysExec(cmd, logger, None, Some(img)))
.evalMap(_.runToSuccessStdout(logger))
} }
private def fixLanguage(lang: String): String = private def fixLanguage(lang: String): String =

View File

@ -6,12 +6,9 @@
package docspell.extract.ocr package docspell.extract.ocr
import java.nio.file.Paths
import fs2.io.file.Path import fs2.io.file.Path
import docspell.common._ import docspell.common.exec.ExternalCommand
import docspell.common.util.File
case class OcrConfig( case class OcrConfig(
maxImageSize: Int, maxImageSize: Int,
@ -25,43 +22,10 @@ object OcrConfig {
case class PageRange(begin: Int) case class PageRange(begin: Int)
case class Ghostscript(command: SystemCommand.Config, workingDir: Path) case class Ghostscript(command: ExternalCommand, workingDir: Path)
case class Tesseract(command: SystemCommand.Config) case class Tesseract(command: ExternalCommand)
case class Unpaper(command: SystemCommand.Config) case class Unpaper(command: ExternalCommand)
val default = OcrConfig(
maxImageSize = 3000 * 3000,
pageRange = PageRange(10),
ghostscript = Ghostscript(
SystemCommand.Config(
"gs",
Seq(
"-dNOPAUSE",
"-dBATCH",
"-dSAFER",
"-sDEVICE=tiffscaled8",
"-sOutputFile={{outfile}}",
"{{infile}}"
),
Duration.seconds(30)
),
File.path(
Paths.get(System.getProperty("java.io.tmpdir")).resolve("docspell-extraction")
)
),
unpaper = Unpaper(
SystemCommand
.Config("unpaper", Seq("{{infile}}", "{{outfile}}"), Duration.seconds(30))
),
tesseract = Tesseract(
SystemCommand
.Config(
"tesseract",
Seq("{{file}}", "stdout", "-l", "{{lang}}"),
Duration.minutes(1)
)
)
)
} }

View File

@ -6,9 +6,14 @@
package docspell.extract.ocr package docspell.extract.ocr
import java.nio.file.Paths
import cats.effect.IO import cats.effect.IO
import cats.effect.unsafe.implicits.global import cats.effect.unsafe.implicits.global
import docspell.common.Duration
import docspell.common.exec.ExternalCommand
import docspell.common.util.File
import docspell.files.TestFiles import docspell.files.TestFiles
import docspell.logging.TestLoggingConfig import docspell.logging.TestLoggingConfig
@ -21,7 +26,7 @@ class TextExtractionSuite extends FunSuite with TestLoggingConfig {
test("extract english pdf".ignore) { test("extract english pdf".ignore) {
val text = TextExtract val text = TextExtract
.extract[IO](letterSourceEN, logger, "eng", OcrConfig.default) .extract[IO](letterSourceEN, logger, "eng", TextExtractionSuite.defaultConfig)
.compile .compile
.lastOrError .lastOrError
.unsafeRunSync() .unsafeRunSync()
@ -31,7 +36,7 @@ class TextExtractionSuite extends FunSuite with TestLoggingConfig {
test("extract german pdf".ignore) { test("extract german pdf".ignore) {
val expect = TestFiles.letterDEText val expect = TestFiles.letterDEText
val extract = TextExtract val extract = TextExtract
.extract[IO](letterSourceDE, logger, "deu", OcrConfig.default) .extract[IO](letterSourceDE, logger, "deu", TextExtractionSuite.defaultConfig)
.compile .compile
.lastOrError .lastOrError
.unsafeRunSync() .unsafeRunSync()
@ -39,3 +44,37 @@ class TextExtractionSuite extends FunSuite with TestLoggingConfig {
assertEquals(extract.value, expect) assertEquals(extract.value, expect)
} }
} }
object TextExtractionSuite {
val defaultConfig = OcrConfig(
maxImageSize = 3000 * 3000,
pageRange = OcrConfig.PageRange(10),
ghostscript = OcrConfig.Ghostscript(
ExternalCommand(
"gs",
Seq(
"-dNOPAUSE",
"-dBATCH",
"-dSAFER",
"-sDEVICE=tiffscaled8",
"-sOutputFile={{outfile}}",
"{{infile}}"
),
Duration.seconds(30)
),
File.path(
Paths.get(System.getProperty("java.io.tmpdir")).resolve("docspell-extraction")
)
),
unpaper = OcrConfig.Unpaper(
ExternalCommand("unpaper", Seq("{{infile}}", "{{outfile}}"), Duration.seconds(30))
),
tesseract = OcrConfig.Tesseract(
ExternalCommand(
"tesseract",
Seq("{{file}}", "stdout", "-l", "{{lang}}"),
Duration.minutes(1)
)
)
)
}

View File

@ -19,7 +19,7 @@ import munit._
class ZipTest extends CatsEffectSuite with TestLoggingConfig { class ZipTest extends CatsEffectSuite with TestLoggingConfig {
val logger = docspell.logging.getLogger[IO] val logger = docspell.logging.getLogger[IO]
val tempDir = ResourceFixture( val tempDir = ResourceFunFixture(
Files[IO].tempDirectory(Path("target").some, "zip-test-", None) Files[IO].tempDirectory(Path("target").some, "zip-test-", None)
) )

View File

@ -201,6 +201,7 @@ object FtsRepository extends DoobieMeta {
case Language.Czech => "simple" case Language.Czech => "simple"
case Language.Latvian => "simple" case Language.Latvian => "simple"
case Language.Japanese => "simple" case Language.Japanese => "simple"
case Language.JpnVert => "simple"
case Language.Hebrew => "simple" case Language.Hebrew => "simple"
case Language.Lithuanian => "simple" case Language.Lithuanian => "simple"
case Language.Polish => "simple" case Language.Polish => "simple"

View File

@ -45,7 +45,7 @@ object SolrMigration {
description, description,
FtsMigration.Result.reIndexAll.pure[F] FtsMigration.Result.reIndexAll.pure[F]
), ),
true dataChangeOnly = true
) )
def indexAll[F[_]: Applicative]( def indexAll[F[_]: Applicative](
@ -59,7 +59,7 @@ object SolrMigration {
description, description,
FtsMigration.Result.indexAll.pure[F] FtsMigration.Result.indexAll.pure[F]
), ),
true dataChangeOnly = true
) )
def apply[F[_]: Functor]( def apply[F[_]: Functor](
@ -74,6 +74,6 @@ object SolrMigration {
description, description,
task.map(_ => FtsMigration.Result.workDone) task.map(_ => FtsMigration.Result.workDone)
), ),
false dataChangeOnly = false
) )
} }

View File

@ -299,14 +299,22 @@ object SolrSetup {
Map("add-field" -> body.asJson).asJson Map("add-field" -> body.asJson).asJson
def string(field: Field): AddField = def string(field: Field): AddField =
AddField(field, "string", true, true, false) AddField(field, "string", stored = true, indexed = true, multiValued = false)
def textGeneral(field: Field): AddField = def textGeneral(field: Field): AddField =
AddField(field, "text_general", true, true, false) AddField(field, "text_general", stored = true, indexed = true, multiValued = false)
def textLang(field: Field, lang: Language): AddField = def textLang(field: Field, lang: Language): AddField =
if (lang == Language.Czech) AddField(field, s"text_cz", true, true, false) if (lang == Language.Czech)
else AddField(field, s"text_${lang.iso2}", true, true, false) AddField(field, s"text_cz", stored = true, indexed = true, multiValued = false)
else
AddField(
field,
s"text_${lang.iso2}",
stored = true,
indexed = true,
multiValued = false
)
} }
case class DeleteField(name: Field) case class DeleteField(name: Field)

Some files were not shown because too many files have changed in this diff Show More