diff --git a/docker/consumedir.dockerfile b/docker/consumedir.dockerfile index dbdc61ca..39079ced 100644 --- a/docker/consumedir.dockerfile +++ b/docker/consumedir.dockerfile @@ -13,7 +13,7 @@ RUN apk add --no-cache curl bash inotify-tools COPY --from=docspell-base-binaries /opt/docspell-tools /opt/docspell-tools -ENTRYPOINT /opt/docspell-tools/consumedir.sh --path /opt/docs -i --iheader Docspell-Integration:$DOCSPELL_HEADER_VALUE -m http://docspell-restserver:7880/api/v1/open/integration/item -v +ENTRYPOINT /opt/docspell-tools/consumedir/consumedir.sh --path /opt/docs -i --iheader Docspell-Integration:$DOCSPELL_HEADER_VALUE -m http://docspell-restserver:7880/api/v1/open/integration/item -v HEALTHCHECK --interval=1m --timeout=10s --retries=2 --start-period=10s \ CMD pgrep inotifywait diff --git a/nix/module-consumedir.nix b/nix/module-consumedir.nix index 66c485e9..f196e9e3 100644 --- a/nix/module-consumedir.nix +++ b/nix/module-consumedir.nix @@ -114,7 +114,7 @@ in { else []) ++ (map (a: "'" + a + "'") cfg.urls); - cmd = "${pkgs.docspell.tools}/bin/consumedir.sh " + (builtins.concatStringsSep " " args); + cmd = "${pkgs.docspell.tools}/bin/ds-consumedir " + (builtins.concatStringsSep " " args); in { description = "Docspell Consumedir"; diff --git a/nix/module-joex.nix b/nix/module-joex.nix index 32c663e9..02775fe8 100644 --- a/nix/module-joex.nix +++ b/nix/module-joex.nix @@ -66,7 +66,6 @@ let preview = { dpi = 32; }; - ocr = { max-image-size = 14000000; page-range = { @@ -97,10 +96,11 @@ let }; }; text-analysis = { - max-length = 10000; + max-length = 5000; nlp = { mode = "full"; clear-interval = "15 minutes"; + max-due-date-years = 10; regex-ner = { max-entries = 1000; file-cache-time = "1 minute"; @@ -108,7 +108,7 @@ let }; classification = { enabled = true; - item-count = 0; + item-count = 600; classifiers = [ { "useSplitWords" = "true"; "splitWordsTokenizerRegexp" = ''[\p{L}][\p{L}0-9]*|(?:\$ ?)?[0-9]+(?:\.[0-9]{2})?%?|\s+|.''; @@ -123,9 +123,6 @@ let }; 
working-dir = "/tmp/docspell-analysis"; }; - processing = { - max-due-date-years = 10; - }; convert = { chunk-size = 524288; converted-filename-part = "converted"; @@ -816,6 +813,15 @@ in { ''; }; + max-due-date-years = mkOption { + type = types.int; + default = defaults.processing.max-due-date-years; + description = '' + Restricts proposals for due dates. Only dates earlier than this + number of years in the future are considered. + ''; + }; + clear-interval = mkOption { type = types.str; default = defaults.text-analysis.nlp.clear-interval; @@ -828,7 +834,7 @@ in { regex-ner = mkOption { type = types.submodule({ options = { - enabled = mkOption { + max-entries = mkOption { type = types.int; default = defaults.text-analysis.regex-ner.max-entries; description = '' @@ -925,23 +931,6 @@ in { description = "Settings for text analysis"; }; - processing = mkOption { - type = types.submodule({ - options = { - max-due-date-years = mkOption { - type = types.int; - default = defaults.processing.max-due-date-years; - description = '' - Restricts proposals for due dates. Only dates earlier than this - number of years in the future are considered. 
- ''; - }; - }; - }); - default = defaults.processing; - description = "General config for processing documents"; - }; - convert = mkOption { type = types.submodule({ options = { diff --git a/nix/pkg.nix b/nix/pkg.nix index 6ad5b564..5224e5b9 100644 --- a/nix/pkg.nix +++ b/nix/pkg.nix @@ -1,4 +1,4 @@ -cfg: {stdenv, fetchzip, file, curl, inotifyTools, fetchurl, jdk11, bash}: +cfg: {stdenv, fetchzip, file, curl, inotifyTools, fetchurl, jdk11, bash, jq, sqlite}: let meta = with stdenv.lib; { description = "Docspell helps to organize and archive your paper documents."; @@ -60,12 +60,19 @@ in installPhase = '' mkdir -p $out/bin - cp $src/consumedir.sh $out/bin/ cp $src/ds.sh $out/bin/ds - sed -i 's,CURL_CMD="curl",CURL_CMD="${curl}/bin/curl",g' $out/bin/consumedir.sh sed -i 's,CURL_CMD="curl",CURL_CMD="${curl}/bin/curl",g' $out/bin/ds - sed -i 's,INOTIFY_CMD="inotifywait",INOTIFY_CMD="${inotifyTools}/bin/inotifywait",g' $out/bin/consumedir.sh - sed -i 's,FILE_CMD="file",FILE_CMD="${file}/bin/file",g' $out/bin/ds + + while read f; do + target="ds-$(basename "$f" ".sh")" + echo "Installing $f -> $target" + cp "$f" "$out/bin/$target" + sed -i 's,CURL_CMD="curl",CURL_CMD="${curl}/bin/curl",g' $out/bin/$target + sed -i 's,INOTIFY_CMD="inotifywait",INOTIFY_CMD="${inotifyTools}/bin/inotifywait",g' $out/bin/$target + sed -i 's,JQ_CMD="jq",JQ_CMD="${jq}/bin/jq",g' $out/bin/$target + sed -i 's,SQLITE_CMD="sqlite3",SQLITE_CMD="${sqlite}/bin/sqlite3",g' $out/bin/$target + done < <(find . 
-name "*.sh" -mindepth 2 -not -path "*webextension*") + chmod 755 $out/bin/* ''; diff --git a/tools/consumedir-cleaner/consumedir-cleaner.sh b/tools/consumedir-cleaner/consumedir-cleaner.sh index 1e8c2574..7f367df2 100755 --- a/tools/consumedir-cleaner/consumedir-cleaner.sh +++ b/tools/consumedir-cleaner/consumedir-cleaner.sh @@ -9,7 +9,10 @@ echo echo "#################################################" echo && echo -jq --version > /dev/null +CURL_CMD="curl" +JQ_CMD="jq" + +"$JQ_CMD" --version > /dev/null if [ $? -ne 0 ]; then echo "please install 'jq'" exit -4 @@ -42,7 +45,7 @@ fi ############# FUNCTIONS function curl_call() { - curl_cmd="$1 -H 'X-Docspell-Auth: $ds_token'" + curl_cmd="$CURL_CMD $1 -H 'X-Docspell-Auth: $ds_token'" curl_result=$(eval $curl_cmd) curl_code=$? @@ -60,12 +63,12 @@ function curl_call() { function login() { - curl_call "curl -s -X POST -d '{\"account\": \"$ds_collective/$ds_user\", \"password\": \"$ds_password\"}' ${ds_url}/api/v1/open/auth/login" + curl_call "-s -X POST -d '{\"account\": \"$ds_collective/$ds_user\", \"password\": \"$ds_password\"}' ${ds_url}/api/v1/open/auth/login" - curl_status=$(echo $curl_result | jq -r ".success") + curl_status=$(echo $curl_result | $JQ_CMD -r ".success") if [ "$curl_status" == "true" ]; then - ds_token=$(echo $curl_result | jq -r ".token") + ds_token=$(echo $curl_result | $JQ_CMD -r ".token") echo "Login successfull ( Token: $ds_token )" else @@ -127,8 +130,8 @@ do # check for checksum tmp_checksum=$(sha256sum "$tmp_filepath" | awk '{print $1}') - curl_call "curl -s -X GET '$ds_url/api/v1/sec/checkfile/$tmp_checksum'" - curl_status=$(echo $curl_result | jq -r ".exists") + curl_call "-s -X GET '$ds_url/api/v1/sec/checkfile/$tmp_checksum'" + curl_status=$(echo $curl_result | $JQ_CMD -r ".exists") if [ $curl_code -ne 0 ]; then # error @@ -136,8 +139,8 @@ do # file exists in Docspell elif [ "$curl_status" == "true" ]; then - item_name=$(echo $curl_result | jq -r ".items[0].name") - item_id=$(echo 
$curl_result | jq -r ".items[0].id") + item_name=$(echo $curl_result | $JQ_CMD -r ".items[0].name") + item_id=$(echo $curl_result | $JQ_CMD -r ".items[0].id") echo "File already exists: '$item_name (ID: $item_id)'" printf "%${#len_resultset}s" " "; printf " " @@ -145,7 +148,7 @@ do echo "... removing file" rm "$tmp_filepath" else - created=$(echo $curl_result | jq -r ".items[0].created") + created=$(echo $curl_result | $JQ_CMD -r ".items[0].created") cur_dir="$ds_archive_path/$(date -d @$(echo "($created+500)/1000" | bc) +%Y-%m )" echo "... moving to archive by month added ('$cur_dir')" @@ -160,8 +163,8 @@ do if [ "$DS_CC_UPLOAD_MISSING" == true ]; then printf "%${#len_resultset}s" " "; printf " " printf "...uploading file.." - curl_call "curl -s -X POST '$ds_url/api/v1/sec/upload/item' -H 'Content-Type: multipart/form-data' -F 'file=@$tmp_filepath'" - curl_status=$(echo $curl_result | jq -r ".success") + curl_call "-s -X POST '$ds_url/api/v1/sec/upload/item' -H 'Content-Type: multipart/form-data' -F 'file=@$tmp_filepath'" + curl_status=$(echo $curl_result | $JQ_CMD -r ".success") if [ "$curl_status" == "true" ]; then echo ". 
done" else diff --git a/tools/consumedir.sh b/tools/consumedir/consumedir.sh similarity index 100% rename from tools/consumedir.sh rename to tools/consumedir/consumedir.sh diff --git a/tools/convert-all-pdfs.sh b/tools/convert-pdf/convert-all-pdfs.sh similarity index 60% rename from tools/convert-all-pdfs.sh rename to tools/convert-pdf/convert-all-pdfs.sh index 5e47e2e1..a3a8c6d4 100755 --- a/tools/convert-all-pdfs.sh +++ b/tools/convert-pdf/convert-all-pdfs.sh @@ -6,6 +6,9 @@ set -e +CURL_CMD="curl" +JQ_CMD="jq" + BASE_URL="${1:-http://localhost:7880}" LOGIN_URL="$BASE_URL/api/v1/open/auth/login" TRIGGER_URL="$BASE_URL/api/v1/sec/item/convertallpdfs" @@ -18,12 +21,12 @@ echo -n "Password: " read -s PASS echo -auth=$(curl --fail -XPOST --silent --data-binary "{\"account\":\"$USER\", \"password\":\"$PASS\"}" "$LOGIN_URL") +auth=$("$CURL_CMD" --fail -XPOST --silent --data-binary "{\"account\":\"$USER\", \"password\":\"$PASS\"}" "$LOGIN_URL") -if [ "$(echo $auth | jq .success)" == "true" ]; then +if [ "$(echo $auth | "$JQ_CMD" .success)" == "true" ]; then echo "Login successful" - auth_token=$(echo $auth | jq -r .token) - curl --fail -XPOST -H "X-Docspell-Auth: $auth_token" "$TRIGGER_URL" + auth_token=$(echo $auth | "$JQ_CMD" -r .token) + "$CURL_CMD" --fail -XPOST -H "X-Docspell-Auth: $auth_token" "$TRIGGER_URL" else echo "Login failed." fi diff --git a/tools/export-files.sh b/tools/export-files/export-files.sh similarity index 83% rename from tools/export-files.sh rename to tools/export-files/export-files.sh index 6281d5d0..374faf9c 100755 --- a/tools/export-files.sh +++ b/tools/export-files/export-files.sh @@ -47,6 +47,9 @@ # Docspell sends with each file its sha256 checksum via the ETag # header. This is used to do a integrity check after downloading. +CURL_CMD="curl" +JQ_CMD="jq" + if [ -z "$1" ]; then echo "The base-url to docspell is required." 
@@ -85,12 +88,12 @@ mcurl() { tmpfile1=$(mktemp -t "ds-export.XXXXX") tmpfile2=$(mktemp -t "ds-export.XXXXX") set +e - curl -# --fail --stderr "$tmpfile1" -o "$tmpfile2" -H "X-Docspell-Auth: $auth_token" "$@" + "$CURL_CMD" -# --fail --stderr "$tmpfile1" -o "$tmpfile2" -H "X-Docspell-Auth: $auth_token" "$@" status=$? set -e if [ $status -ne 0 ]; then - errout "curl -H 'X-Docspell-Auth: …' $@" - errout "Curl command failed (rc=$status)! Output is below." + errout "$CURL_CMD -H 'X-Docspell-Auth: …' $@" + errout "curl command failed (rc=$status)! Output is below." cat "$tmpfile1" >&2 cat "$tmpfile2" >&2 rm -f "$tmpfile1" "$tmpfile2" @@ -121,12 +124,12 @@ declare auth_time login() { - auth=$(curl -s --fail -XPOST \ + auth=$("$CURL_CMD" -s --fail -XPOST \ --data-binary "{\"account\":\"$DS_USER\", \"password\":\"$DS_PASS\"}" "$LOGIN_URL") - if [ "$(echo $auth | jq .success)" == "true" ]; then + if [ "$(echo $auth | "$JQ_CMD" .success)" == "true" ]; then errout "Login successful" - auth_token=$(echo $auth | jq -r .token) + auth_token=$(echo $auth | "$JQ_CMD" -r .token) auth_time=$(date +%s) else errout "Login failed." 
@@ -136,7 +139,7 @@ login() { checkLogin() { elapsed=$((1000 * ($(date +%s) - $auth_time))) - maxtime=$(echo $auth | jq .validMs) + maxtime=$(echo $auth | "$JQ_CMD" .validMs) elapsed=$(($elapsed + 1000)) if [ $elapsed -gt $maxtime ]; then @@ -151,11 +154,11 @@ listItems() { errout "Get next items with offset=$OFFSET, limit=$LIMIT" REQ="{\"offset\":$OFFSET, \"limit\":$LIMIT, \"tagsInclude\":[],\"tagsExclude\":[],\"tagCategoriesInclude\":[], \"tagCategoriesExclude\":[],\"customValues\":[],\"inbox\":false}" - mcurl -XPOST -H 'ContentType: application/json' -d "$REQ" "$SEARCH_URL" | jq -r '.groups[].items[]|.id' + mcurl -XPOST -H 'ContentType: application/json' -d "$REQ" "$SEARCH_URL" | "$JQ_CMD" -r '.groups[].items[]|.id' } fetchItemCount() { - mcurl -XGET "$INSIGHT_URL" | jq '[.incomingCount, .outgoingCount] | add' + mcurl -XGET "$INSIGHT_URL" | "$JQ_CMD" '[.incomingCount, .outgoingCount] | add' } fetchItem() { @@ -174,9 +177,9 @@ downloadAttachment() { rm -f "$attachOut" fi - checksum1=$(curl -s -I -H "X-Docspell-Auth: $auth_token" "$ATTACH_URL/$attachId/original" | \ - grep -i 'etag' | cut -d' ' -f2 | jq -r) - curl -s -o "$attachOut" -H "X-Docspell-Auth: $auth_token" "$ATTACH_URL/$attachId/original" + checksum1=$("$CURL_CMD" -s -I -H "X-Docspell-Auth: $auth_token" "$ATTACH_URL/$attachId/original" | \ + grep -i 'etag' | cut -d' ' -f2 | "$JQ_CMD" -r) + "$CURL_CMD" -s -o "$attachOut" -H "X-Docspell-Auth: $auth_token" "$ATTACH_URL/$attachId/original" checksum2=$(sha256sum "$attachOut" | cut -d' ' -f1 | xargs) if [ "$checksum1" == "$checksum2" ]; then errout " - Checksum ok." 
@@ -190,10 +193,10 @@ downloadAttachment() { downloadItem() { checkLogin itemData=$(fetchItem "$1") - errout "Get item $(echo $itemData | jq -r .id)" - created=$(echo $itemData|jq '.created') - created=$((($(echo $itemData|jq '.created') + 500) / 1000)) - itemId=$(echo $itemData | jq -r '.id') + errout "Get item $(echo $itemData | "$JQ_CMD" -r .id)" + created=$(echo $itemData|"$JQ_CMD" '.created') + created=$((($(echo $itemData|"$JQ_CMD" '.created') + 500) / 1000)) + itemId=$(echo $itemData | "$JQ_CMD" -r '.id') out="$TARGET/$(date -d @$created +%Y-%m)/$itemId" if [ -d "$out" ] && [ "$DROP_ITEM" == "y" ]; then @@ -209,14 +212,14 @@ downloadItem() { errout " - Removing metadata.json as requested" rm -f "$out/metadata.json" fi - echo $itemData | jq > "$out/metadata.json" + echo $itemData | "$JQ_CMD" > "$out/metadata.json" fi while read attachId attachName; do attachOut="$out/$attachName" checkLogin downloadAttachment "$attachId" - done < <(echo $itemData | jq -r '.sources[] | [.id,.name] | join(" ")') + done < <(echo $itemData | "$JQ_CMD" -r '.sources[] | [.id,.name] | join(" ")') } diff --git a/tools/import-paperless/import-paperless.sh b/tools/import-paperless/import-paperless.sh index aa75dac0..cce6620e 100755 --- a/tools/import-paperless/import-paperless.sh +++ b/tools/import-paperless/import-paperless.sh @@ -5,6 +5,10 @@ # LIMIT_DOC="LIMIT 5" SKIP_EXISTING_DOCS=true +CURL_CMD="curl" +JQ_CMD="jq" +SQLITE_CMD="sqlite3" + echo "##################### START #####################" echo " Docspell - Import from Paperless v '0.3 beta'" @@ -14,7 +18,7 @@ echo echo "#################################################" echo && echo -jq --version > /dev/null +"$JQ_CMD" --version > /dev/null if [ $? 
-ne 0 ]; then echo "please install 'jq'" exit -4 @@ -62,7 +66,7 @@ if [ "$SKIP_EXISTING_DOCS" == "true" ]; then declare -A doc_skip; fi ############# FUNCTIONS function curl_call() { - curl_cmd="$1 -H 'X-Docspell-Auth: $ds_token'" + curl_cmd="$CURL_CMD $1 -H 'X-Docspell-Auth: $ds_token'" curl_result=$(eval $curl_cmd) if [ "$curl_result" == '"Authentication failed."' ] || [ "$curl_result" == 'Response timed out' ]; then @@ -78,12 +82,12 @@ function curl_call() { } function login() { - curl_call "curl -s -X POST -d '{\"account\": \"$ds_user\", \"password\": \"$ds_password\"}' ${ds_url}/api/v1/open/auth/login" + curl_call "-s -X POST -d '{\"account\": \"$ds_user\", \"password\": \"$ds_password\"}' ${ds_url}/api/v1/open/auth/login" - curl_status=$(echo $curl_result | jq -r ".success") + curl_status=$(echo $curl_result | "$JQ_CMD" -r ".success") if [ "$curl_status" == "true" ]; then - ds_token=$(echo $curl_result | jq -r ".token") + ds_token=$(echo $curl_result | "$JQ_CMD" -r ".token") echo "Login successfull ( Token: $ds_token )" else @@ -110,7 +114,7 @@ for mode in "${modes[@]}"; do else tmp_limit=$LIMIT fi - tmp_resultset=(`sqlite3 -header $db_path "select ${columns[$mode]} from $mode order by 1 DESC $tmp_limit;"`) + tmp_resultset=(`$SQLITE_CMD -header $db_path "select ${columns[$mode]} from $mode order by 1 DESC $tmp_limit;"`) tmp_headers=($(echo "${tmp_resultset[0]}" | tr '|' '\n')) @@ -138,15 +142,15 @@ for mode in "${modes[@]}"; do echo "\"${tmp_result_arr[name]}\" [id: ${tmp_result_arr[id]}]" printf "%${#len_resultset}s" " "; printf " " - curl_call "curl -s -X POST '$ds_url/api/v1/sec/organization' -H 'Content-Type: application/json' -d '{\"id\":\"\",\"name\":\"${tmp_result_arr[name]}\",\"address\":{\"street\":\"\",\"zip\":\"\",\"city\":\"\",\"country\":\"\"},\"contacts\":[],\"created\":0}'" - curl_status=$(echo $curl_result | jq -r ".success") + curl_call "-s -X POST '$ds_url/api/v1/sec/organization' -H 'Content-Type: application/json' -d 
'{\"id\":\"\",\"name\":\"${tmp_result_arr[name]}\",\"address\":{\"street\":\"\",\"zip\":\"\",\"city\":\"\",\"country\":\"\"},\"contacts\":[],\"created\":0}'" + curl_status=$(echo $curl_result | "$JQ_CMD" -r ".success") if [ "$curl_status" == "true" ]; then echo "Organization successfully created from correspondent" - elif [ "$(echo $curl_result | jq -r '.message')" == "Adding failed, because the entity already exists." ]; then + elif [ "$(echo $curl_result | "$JQ_CMD" -r '.message')" == "Adding failed, because the entity already exists." ]; then echo "Organization already exists, nothing to do" else - echo "FATAL Error during creation of organization: $(echo $curl_result | jq -r '.message')" + echo "FATAL Error during creation of organization: $(echo $curl_result | "$JQ_CMD" -r '.message')" exit 2 fi echo @@ -171,15 +175,15 @@ for mode in "${modes[@]}"; do # check for checksum tmp_checksum=$(sha256sum "$tmp_filepath" | awk '{print $1}') - curl_call "curl -s -X GET '$ds_url/api/v1/sec/checkfile/$tmp_checksum'" - curl_status=$(echo $curl_result | jq -r ".exists") + curl_call "-s -X GET '$ds_url/api/v1/sec/checkfile/$tmp_checksum'" + curl_status=$(echo $curl_result | "$JQ_CMD" -r ".exists") # upload if not existent if [ $? -eq 0 ] && [ "$curl_status" == "false" ]; then echo -n "File does not exist, uploading.." - curl_call "curl -s -X POST '$ds_url/api/v1/sec/upload/item' -H 'Content-Type: multipart/form-data' -F 'file=@$tmp_filepath;type=application/${tmp_result_arr[file_type]}'" + curl_call "-s -X POST '$ds_url/api/v1/sec/upload/item' -H 'Content-Type: multipart/form-data' -F 'file=@$tmp_filepath;type=application/${tmp_result_arr[file_type]}'" - curl_status=$(echo $curl_result | jq -r ".success") + curl_status=$(echo $curl_result | "$JQ_CMD" -r ".success") if [ "$curl_status" == "true" ]; then printf ". ." 
@@ -206,13 +210,13 @@ for mode in "${modes[@]}"; do countMax=25 while [ $count -le $countMax ]; do # get Docspell id of document - curl_call "curl -s -X GET '$ds_url/api/v1/sec/checkfile/$tmp_checksum'" - curl_status=$(echo $curl_result | jq -r ".exists") + curl_call "-s -X GET '$ds_url/api/v1/sec/checkfile/$tmp_checksum'" + curl_status=$(echo $curl_result | "$JQ_CMD" -r ".exists") res=$? # file id returned if [ $res -eq 0 ] && [ "$curl_status" == "true" ]; then - curl_status=$(echo $curl_result | jq -r ".items[0].id") + curl_status=$(echo $curl_result | "$JQ_CMD" -r ".items[0].id") # paperless id to docspell id for later use pl2ds_id[${tmp_result_arr[id]}]=$curl_status echo ".done" @@ -245,19 +249,19 @@ for mode in "${modes[@]}"; do printf "Set link to organization \"${corr2name[${tmp_result_arr[correspondent_id]}]}\" .." # get organizations matching doc's orga (can be several when parts match) - curl_call "curl -s -X GET '$ds_url/api/v1/sec/organization' -G --data-urlencode 'q=${corr2name[${tmp_result_arr[correspondent_id]}]}'" + curl_call "-s -X GET '$ds_url/api/v1/sec/organization' -G --data-urlencode 'q=${corr2name[${tmp_result_arr[correspondent_id]}]}'" # Search for exact match of paperless correspondent in fetched organizations from Docspell - curl_status=$(echo $curl_result | jq -r ".items[] | select(.name==\"${corr2name[${tmp_result_arr[correspondent_id]}]}\") | .name") + curl_status=$(echo $curl_result | "$JQ_CMD" -r ".items[] | select(.name==\"${corr2name[${tmp_result_arr[correspondent_id]}]}\") | .name") # double-check that found organization matches doc's correspondent if [ "$curl_status" == "${corr2name[${tmp_result_arr[correspondent_id]}]}" ]; then - curl_status=$(echo $curl_result | jq -r ".items[] | select(.name==\"${corr2name[${tmp_result_arr[correspondent_id]}]}\") | .id") + curl_status=$(echo $curl_result | "$JQ_CMD" -r ".items[] | select(.name==\"${corr2name[${tmp_result_arr[correspondent_id]}]}\") | .id") # Set actual link to document - 
curl_call "curl -s -X PUT '$ds_url/api/v1/sec/item/${pl2ds_id[${tmp_result_arr[id]}]}/corrOrg' -H 'Content-Type: application/json' -d '{\"id\":\"$curl_status\"}'" + curl_call "-s -X PUT '$ds_url/api/v1/sec/item/${pl2ds_id[${tmp_result_arr[id]}]}/corrOrg' -H 'Content-Type: application/json' -d '{\"id\":\"$curl_status\"}'" - curl_status=$(echo $curl_result | jq -r ".success") + curl_status=$(echo $curl_result | "$JQ_CMD" -r ".success") if [ "$curl_status" == "true" ]; then echo ". done" @@ -280,9 +284,9 @@ for mode in "${modes[@]}"; do # Set name of document printf "%${#len_resultset}s" " "; printf " " - curl_call "curl -s -X PUT '$ds_url/api/v1/sec/item/${pl2ds_id[${tmp_result_arr[id]}]}/name' -H 'Content-Type: application/json' -d '{\"text\":\"${tmp_result_arr[title]}\"}'" + curl_call "-s -X PUT '$ds_url/api/v1/sec/item/${pl2ds_id[${tmp_result_arr[id]}]}/name' -H 'Content-Type: application/json' -d '{\"text\":\"${tmp_result_arr[title]}\"}'" - curl_status=$(echo $curl_result | jq -r ".success") + curl_status=$(echo $curl_result | "$JQ_CMD" -r ".success") if [ "$curl_status" == "true" ]; then echo "Set name of item: \"${tmp_result_arr[title]}\"" @@ -296,9 +300,9 @@ for mode in "${modes[@]}"; do printf "%${#len_resultset}s" " "; printf " " tmp_date="${tmp_result_arr[created]:0:10} 12:00:00" #fix for timezone variations - curl_call "curl -s -X PUT '$ds_url/api/v1/sec/item/${pl2ds_id[${tmp_result_arr[id]}]}/date' -H 'Content-Type: application/json' -d '{\"date\":$( echo "$(date -d "$tmp_date" +%s) * 1000" | bc )}'" + curl_call "-s -X PUT '$ds_url/api/v1/sec/item/${pl2ds_id[${tmp_result_arr[id]}]}/date' -H 'Content-Type: application/json' -d '{\"date\":$( echo "$(date -d "$tmp_date" +%s) * 1000" | bc )}'" - curl_status=$(echo $curl_result | jq -r ".success") + curl_status=$(echo $curl_result | "$JQ_CMD" -r ".success") if [ "$curl_status" == "true" ]; then echo "Set creation date of item: \"${tmp_date:0:10}\"" @@ -319,15 +323,15 @@ for mode in "${modes[@]}"; do # 
paperless tag id to name for later use tag2name[${tmp_result_arr[id]}]=${tmp_result_arr[name]} - curl_call "curl -s -X POST '$ds_url/api/v1/sec/tag' -H 'Content-Type: application/json' -d '{\"id\":\"ignored\",\"name\":\"${tmp_result_arr[name]}\",\"category\":\"imported (pl)\",\"created\":0}'" + curl_call "-s -X POST '$ds_url/api/v1/sec/tag' -H 'Content-Type: application/json' -d '{\"id\":\"ignored\",\"name\":\"${tmp_result_arr[name]}\",\"category\":\"imported (pl)\",\"created\":0}'" - curl_status=$(echo $curl_result | jq -r ".success") + curl_status=$(echo $curl_result | "$JQ_CMD" -r ".success") if [ "$curl_status" == "true" ]; then echo "Tag successfully created" - elif [ "$(echo $curl_result | jq -r '.message')" == "A tag '${tmp_result_arr[name]}' already exists" ]; then + elif [ "$(echo $curl_result | "$JQ_CMD" -r '.message')" == "A tag '${tmp_result_arr[name]}' already exists" ]; then echo "Tag already exists, nothing to do" else - echo "FATAL Error during creation of tag: $(echo $curl_result | jq -r '.message')" + echo "FATAL Error during creation of tag: $(echo $curl_result | "$JQ_CMD" -r '.message')" exit 9 fi else @@ -344,9 +348,9 @@ for mode in "${modes[@]}"; do printf "%${#len_resultset}s" " "; printf " " #link tags to documents - curl_call "curl -s -X PUT '$ds_url/api/v1/sec/item/${pl2ds_id[${tmp_result_arr[document_id]}]}/taglink' -H 'Content-Type: application/json' -d '{\"items\":[\"${tag2name[${tmp_result_arr[tag_id]}]}\"]}'" + curl_call "-s -X PUT '$ds_url/api/v1/sec/item/${pl2ds_id[${tmp_result_arr[document_id]}]}/taglink' -H 'Content-Type: application/json' -d '{\"items\":[\"${tag2name[${tmp_result_arr[tag_id]}]}\"]}'" - curl_status=$(echo $curl_result | jq -r ".success") + curl_status=$(echo $curl_result | "$JQ_CMD" -r ".success") if [ "$curl_status" == "true" ]; then echo '...applied' else diff --git a/tools/preview/regenerate-previews.sh b/tools/preview/regenerate-previews.sh index b439a8e0..cc474c66 100755 --- 
a/tools/preview/regenerate-previews.sh +++ b/tools/preview/regenerate-previews.sh @@ -6,6 +6,10 @@ set -e +CURL_CMD="curl" +JQ_CMD="jq" + + BASE_URL="${1:-http://localhost:7880}" LOGIN_URL="$BASE_URL/api/v1/open/auth/login" TRIGGER_URL="$BASE_URL/api/v1/sec/collective/previews" @@ -18,11 +22,11 @@ echo -n "Password: " read -s PASS echo -auth=$(curl --fail -XPOST --silent --data-binary "{\"account\":\"$USER\", \"password\":\"$PASS\"}" "$LOGIN_URL") +auth=$("$CURL_CMD" --fail -XPOST --silent --data-binary "{\"account\":\"$USER\", \"password\":\"$PASS\"}" "$LOGIN_URL") -if [ "$(echo $auth | jq .success)" == "true" ]; then +if [ "$(echo $auth | $JQ_CMD .success)" == "true" ]; then echo "Login successful" - auth_token=$(echo $auth | jq -r .token) + auth_token=$(echo $auth | "$JQ_CMD" -r .token) curl --fail -XPOST -H "X-Docspell-Auth: $auth_token" "$TRIGGER_URL" else echo "Login failed." diff --git a/tools/reset-password/reset-password.sh b/tools/reset-password/reset-password.sh index b80bf4c8..01d377be 100755 --- a/tools/reset-password/reset-password.sh +++ b/tools/reset-password/reset-password.sh @@ -9,6 +9,9 @@ # ./reset-password.sh http://localhost:7880 test123 your/account # +CURL_CMD="curl" +JQ_CMD="jq" + if [ -z "$1" ]; then echo "The docspell base-url is required as first argument." exit 1 @@ -32,15 +35,15 @@ fi RESET_URL="${BASE_URL}/api/v1/admin/user/resetPassword" -OUT=$(curl -s -XPOST \ +OUT=$("$CURL_CMD" -s -XPOST \ -H "Docspell-Admin-Secret: $SECRET" \ -H "Content-Type: application/json" \ -d "{\"account\": \"$USER\"}" \ "$RESET_URL") -if command -v jq > /dev/null; then - echo $OUT | jq +if command -v "$JQ_CMD" > /dev/null; then + echo $OUT | "$JQ_CMD" else echo $OUT fi