mirror of
https://github.com/TheAnachronism/docspell.git
synced 2025-08-05 02:24:52 +00:00
Remove obsolete shell scripts
The command-line interface now supersedes these scripts.
This commit is contained in:
@ -1,181 +0,0 @@
|
||||
#!/usr/bin/env bash
# Docspell "Consumedir Cleaner": reconciles a consumedir folder with a
# Docspell server — files already known to the server are deleted or
# moved to an archive folder; unknown files can optionally be uploaded.
echo "##################### START #####################"
echo " Docspell Consumedir Cleaner - v0.1 beta"
echo " by totti4ever" && echo
echo " $(date)"
echo
echo "#################################################"
echo && echo

CURL_CMD="curl"
JQ_CMD="jq"

# jq is required to parse the server's JSON responses.
if ! "$JQ_CMD" --version > /dev/null; then
  echo "please install 'jq'"
  # exit codes must be 0..255; the original's `exit -4` produced 252
  exit 4
fi

ds_url=${1%/}                 # server base url, trailing slash stripped
ds_user_param=$2              # expected form: collective/user
ds_user=${ds_user_param#*/}
ds_collective=${ds_user_param%%/*}
ds_password=$3
ds_consumedir_path=${4%/}     # consumedir root, trailing slash stripped
ds_archive_path=$ds_consumedir_path/_archive/$ds_collective

if [ $# -ne 4 ]; then
  echo "FATAL Exactly four parameters needed"
  exit 3
elif [ "$1" == "" ] || [ "$2" == "" ] || [ "$3" == "" ] || [ "$4" == "" ]; then
  echo "FATAL Parameter missing"
  echo " ds_url: $ds_url"
  echo " ds_user: $ds_user"
  echo " ds_password: $ds_password"
  echo " ds_consumedir_path: $ds_consumedir_path"
  exit 2
elif [ "$ds_collective" == "_archive" ]; then
  # "_archive" is where this script moves processed files, so it cannot
  # also be a collective name
  echo "FATAL collective name '_archive' is not supported by this script"
  exit 1
fi
|
||||
|
||||
|
||||
############# FUNCTIONS
|
||||
# curl_call ARGS — run curl with ARGS plus the session auth header.
# Stores the response body in the global curl_result and curl's exit
# code in curl_code. On "Authentication failed."/"Response timed out"
# it re-logs-in and retries; on gateway/404 errors it aborts.
# ARGS is a single pre-quoted string, which is why eval is required
# here (it re-splits the embedded quoting). Do not pass untrusted input.
function curl_call() {
    curl_cmd="$CURL_CMD $1 -H 'X-Docspell-Auth: $ds_token'"
    curl_result=$(eval "$curl_cmd")
    curl_code=$?

    if [ "$curl_result" == '"Authentication failed."' ] || [ "$curl_result" == 'Response timed out' ]; then
        printf "\nNew login required ($curl_result)... "
        login
        # NOTE(review): len_resultset is never assigned in this script,
        # so "%${#len_resultset}s" is "%0s" — confirm intent.
        printf "%${#len_resultset}s" " "; printf " .."
        # Fix: the retry must pass the whole argument string as ONE word;
        # the original's unquoted `curl_call $1` word-split it and the
        # retried request only received "-s".
        curl_call "$1"

    elif [ "$curl_result" == "Bad Gateway" ] || [ "$curl_result" == '404 page not found' ]; then
        echo "FATAL Connection to server failed"
        # exit codes must be 0..255 (`exit -1` produced 255 implicitly)
        exit 1
    fi
}
|
||||
|
||||
|
||||
# login — authenticate against the Docspell server and store the session
# token in the global ds_token (used by curl_call for every request).
# Exits the script when authentication fails.
function login() {
    curl_call "-s -X POST -d '{\"account\": \"$ds_collective/$ds_user\", \"password\": \"$ds_password\"}' ${ds_url}/api/v1/open/auth/login"

    # printf preserves the JSON byte-for-byte (unquoted `echo $curl_result`
    # would collapse whitespace)
    curl_status=$(printf '%s' "$curl_result" | $JQ_CMD -r ".success")

    if [ "$curl_status" == "true" ]; then
        ds_token=$(printf '%s' "$curl_result" | $JQ_CMD -r ".token")
        # message typo fixed ("successfull")
        echo "Login successful ( Token: $ds_token )"
    else
        echo "FATAL Login not successful"
        exit 1
    fi
}
|
||||
|
||||
############# END
|
||||
|
||||
# Show the effective configuration (driven by the DS_CC_* environment
# variables) and give the user a 10-second window to abort before any
# destructive action happens.
echo "Settings:"
if [ "$DS_CC_REMOVE" == "true" ]; then
    echo " ### !!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!! ###"
    echo " - DELETE files? YES"
    echo " when already existing in Docspell. This cannot be undone!"
    echo " ### !!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!! ###"
else
    echo " - DELETE files? no"
    echo " moving already uploaded files to archive"
fi
echo
if [ "$DS_CC_UPLOAD_MISSING" == true ]; then
    echo " - UPLOAD files? YES"
    echo " files not existing in Docspell will be uploaded and will be re-checked in the next run."
else
    echo " - UPLOAD files? no"
    echo " files not existing in Docspell will NOT be uploaded and stay where they are."
fi
echo && echo
echo "Press 'ctrl+c' to cancel"
# 10-second countdown (9..0), rewriting the same terminal line
for ((i=9;i>=0;i--)); do
    printf "\r waiting $i seconds "
    sleep 1s
done
echo && echo

# login, get token
login

echo "Scanning folder for collective '$ds_collective' ($ds_consumedir_path/$ds_collective)"
echo && echo
|
||||
|
||||
# Walk every file of the collective's consumedir and reconcile it with
# the server: known files are deleted (DS_CC_REMOVE=true) or moved into
# a per-month archive folder; unknown files are optionally uploaded
# (DS_CC_UPLOAD_MISSING=true).
while read -r tmp_filepath
do
    if [ "$tmp_filepath" == "" ]; then
        echo "no files found" && echo
        exit 0 #no results
    elif [ ! -f "$tmp_filepath" ]; then
        echo "FATAL no access to file: $tmp_filepath"
        exit 3
    fi

    echo "Checking '$tmp_filepath'"
    printf "%${#len_resultset}s" " "; printf " "

    # ask the server whether this checksum is already known
    tmp_checksum=$(sha256sum "$tmp_filepath" | awk '{print $1}')
    curl_call "-s -X GET '$ds_url/api/v1/sec/checkfile/$tmp_checksum'"
    curl_status=$(printf '%s' "$curl_result" | $JQ_CMD -r ".exists")

    if [ $curl_code -ne 0 ]; then
        # curl_code is set by curl_call
        echo "ERROR $curl_result // $curl_status"

    # file exists in Docspell
    elif [ "$curl_status" == "true" ]; then
        item_name=$(printf '%s' "$curl_result" | $JQ_CMD -r ".items[0].name")
        item_id=$(printf '%s' "$curl_result" | $JQ_CMD -r ".items[0].id")
        echo "File already exists: '$item_name (ID: $item_id)'"

        printf "%${#len_resultset}s" " "; printf " "
        if [ "$DS_CC_REMOVE" == "true" ]; then
            echo "... removing file"
            rm "$tmp_filepath"
        else
            # 'created' is epoch millis; round to seconds for date(1)
            created=$(printf '%s' "$curl_result" | $JQ_CMD -r ".items[0].created")
            cur_dir="$ds_archive_path/$(date -d @$(( (created + 500) / 1000 )) +%Y-%m)"
            echo "... moving to archive by month added ('$cur_dir')"
            mkdir -p "$cur_dir"
            mv "$tmp_filepath" "$cur_dir/"
        fi

    # file does not exist in Docspell
    else
        echo "File does not exist, yet"
        if [ "$DS_CC_UPLOAD_MISSING" == true ]; then
            printf "%${#len_resultset}s" " "; printf " "
            printf "...uploading file.."
            curl_call "-s -X POST '$ds_url/api/v1/sec/upload/item' -H 'Content-Type: multipart/form-data' -F 'file=@$tmp_filepath'"
            curl_status=$(printf '%s' "$curl_result" | $JQ_CMD -r ".success")
            if [ "$curl_status" == "true" ]; then
                echo ". done"
            else
                echo -e "\nERROR $curl_result"
            fi
        fi
    fi

    echo
done \
    <<< "$(find "$ds_consumedir_path/$ds_collective" -type f)"
    # quoted: the find path may contain spaces; an empty result still
    # yields one empty line, which triggers the "no files found" branch

# Fix: the banner must be quoted — an unquoted word starting with '#'
# begins a shell comment, so the original printed only a blank line.
echo "################# DONE #################"
date
|
@ -1,439 +0,0 @@
|
||||
#!/usr/bin/env bash

# This script watches a directory for new files and uploads them to
# docspell. Or it uploads all files currently in the directory.
#
# It requires inotifywait, curl and sha256sum if the `-m' option is
# used.

# saner programming env: these switches turn some bugs into errors
set -o errexit -o pipefail -o noclobber -o nounset

# External tool names, collected here so they are easy to override.
CURL_CMD="curl"
INOTIFY_CMD="inotifywait"
SHA256_CMD="sha256sum"
MKTEMP_CMD="mktemp"
CURL_OPTS=${CURL_OPTS:-}

# util-linux "enhanced" getopt exits with 4 on --test; anything else
# means the long-option parsing below would not work here.
! getopt --test > /dev/null
if [[ ${PIPESTATUS[0]} -ne 4 ]]; then
    echo 'I’m sorry, `getopt --test` failed in this environment.'
    exit 1
fi

# NOTE(review): 'm' appears twice in the short options, and several long
# options (--dry, --iuser, --iheader, --poll, --exclude, --include) have
# no short form — confirm this is intended.
OPTIONS=omhdp:vrmi
LONGOPTS=once,distinct,help,delete,path:,verbose,recursive,dry,integration,iuser:,iheader:,poll:,exclude:,include:

# getopt validates and normalizes the command line; a non-zero status
# means it already printed a complaint.
! PARSED=$(getopt --options=$OPTIONS --longoptions=$LONGOPTS --name "$0" -- "$@")
if [[ ${PIPESTATUS[0]} -ne 0 ]]; then
    # e.g. return value is 1
    # then getopt has complained about wrong arguments to stdout
    exit 2
fi

# read getopt’s output this way to handle the quoting right:
eval set -- "$PARSED"
|
||||
|
||||
# Option state with defaults; watchdir collects every --path argument.
declare -a watchdir
help=n verbose=n delete=n once=n distinct=n recursive=n dryrun=n
integration=n iuser="" iheader="" poll="" exclude="" include=""
# Consume the normalized options produced by getopt until the "--"
# separator; everything after it is the positional url list.
while true; do
    case "$1" in
        -h|--help)
            help=y
            shift
            ;;
        -v|--verbose)
            verbose=y
            shift
            ;;
        -d|--delete)
            delete=y
            shift
            ;;
        -o|--once)
            once=y
            shift
            ;;
        -p|--path)
            watchdir+=("$2")
            shift 2
            ;;
        -m|--distinct)
            distinct=y
            shift
            ;;
        -r|--recursive)
            recursive=y
            shift
            ;;
        --dry)
            dryrun=y
            shift
            ;;
        -i|--integration)
            # integration mode implies recursive traversal
            integration=y
            recursive=y
            shift
            ;;
        --iuser)
            iuser="$2"
            shift 2
            ;;
        --iheader)
            iheader="$2"
            shift 2
            ;;
        --poll)
            poll="$2"
            shift 2
            ;;
        --exclude)
            exclude="$2"
            shift 2
            ;;
        --include)
            include="$2"
            shift 2
            ;;
        --)
            shift
            break
            ;;
        *)
            # unreachable for input already validated by getopt
            echo "Programming error"
            exit 3
            ;;
    esac
done
|
||||
|
||||
|
||||
# showUsage — print the full help text, including the current value of
# every option, to stdout.
# NOTE(review): the unquoted ${watchdir[@]} below can trip
# `set -o nounset` on older bash when no --path was given — confirm.
showUsage() {
    echo "Upload files in a directory"
    echo ""
    echo "Usage: $0 [options] url url ..."
    echo
    echo "Options:"
    echo " -v | --verbose Print more to stdout. (value: $verbose)"
    echo " -d | --delete Delete the file if successfully uploaded. (value: $delete)"
    echo " -p | --path <dir> The directories to watch. This is required. (value: ${watchdir[@]})"
    echo " -h | --help Prints this help text. (value: $help)"
    echo " -m | --distinct Optional. Upload only if the file doesn't already exist. (value: $distinct)"
    echo " -o | --once Instead of watching, upload all files in that dir. (value: $once)"
    echo " --poll <sec> Run the script periodically instead of watching a directory. This can be"
    echo " used if watching via inotify is not possible. (value: $poll)"
    echo " -r | --recursive Traverse the directory(ies) recursively (value: $recursive)"
    echo " -i | --integration Upload to the integration endpoint. It implies -r. This puts the script in"
    echo " a different mode, where the first subdirectory of any given starting point"
    echo " is read as the collective name. The url(s) are completed with this name in"
    echo " order to upload files to the respective collective. So each directory"
    echo " given is expected to contain one subdirectory per collective and the urls"
    echo " are expected to identify the integration endpoint, which is"
    echo " /api/v1/open/integration/item/<collective-name>. (value: $integration)"
    echo " --iheader The header name and value to use with the integration endpoint. This must be"
    echo " in form 'headername:value'. Only used if '-i' is supplied."
    echo " (value: $iheader)"
    echo " --iuser The username and password for basic auth to use with the integration"
    echo " endpoint. This must be of form 'user:pass'. Only used if '-i' is supplied."
    echo " (value: $iuser)"
    echo " --exclude <glob> A shell glob pattern that is used to skip files that match (value: $exclude)."
    echo " --include <glob> A shell glob pattern that is used to find files to upload (value: $include)."
    echo " If --exclude and --include is given, both apply."
    echo " --dry Do a 'dry run', not uploading anything only printing to stdout (value: $dryrun)"
    echo ""
    echo "Arguments:"
    echo " A list of URLs to upload the files to."
    echo ""
    echo "Example: Watch directory"
    echo "$0 --path ~/Downloads -m -dv http://localhost:7880/api/v1/open/upload/item/abcde-12345-abcde-12345"
    echo ""
    echo "Example: Upload all files in a directory"
    echo "$0 --path ~/Downloads -m -dv --once http://localhost:7880/api/v1/open/upload/item/abcde-12345-abcde-12345"
    echo ""
    echo "Example: Integration Endpoint"
    echo "$0 -i --iheader 'Docspell-Integration:test123' -m -p ~/Downloads/ http://localhost:7880/api/v1/open/integration/item"
    echo ""
}
|
||||
|
||||
# Help requested: print usage and stop.
if [ "$help" = "y" ]; then
    showUsage
    exit 0
fi

# handle non-option arguments
if [[ $# -eq 0 ]]; then
    echo "$0: No upload URLs given."
    exit 4
fi
urls=$@

# Fix: watchdir is an array — the original `[ ! -d "$watchdir" ]` only
# inspected its first element (and tripped `set -u` when no --path was
# given). Require at least one --path and validate every directory.
if [[ ${#watchdir[@]} -eq 0 ]]; then
    echo "No directory to watch was given (use -p/--path)."
    exit 4
fi
for dir in "${watchdir[@]}"; do
    if [ ! -d "$dir" ]; then
        echo "The path '$dir' is not a directory."
        exit 4
    fi
done
|
||||
|
||||
|
||||
# trace MSG — verbose-only diagnostic on stderr.
trace() {
    if [ "$verbose" = "y" ]; then
        echo "$1" >&2
    fi
}

# info MSG — unconditional diagnostic on stderr.
# ($1 is intentionally left unquoted, preserving the original
# whitespace-collapsing behavior.)
info() {
    echo $1 >&2
}
|
||||
|
||||
# getCollective FILE DIR — print the collective name for FILE, i.e. the
# first path component of FILE once the watch directory DIR has been
# stripped off the front. Both paths are canonicalized first.
getCollective() {
    file=$(realpath "$1")
    dir=$(realpath "$2")
    collective=${file#"$dir"}
    # the remainder normally starts with "/", making the first cut field
    # empty — fall back to the second field in that case
    coll=$(echo $collective | cut -d'/' -f1)
    [ -n "$coll" ] || coll=$(echo $collective | cut -d'/' -f2)
    echo $coll
}
|
||||
|
||||
|
||||
# upload DIR FILE URL — POST FILE to URL via curl.
# In integration mode the collective (first path component of FILE under
# DIR) is appended to the url and optional basic-auth / header options
# are added. Honors --dry and --distinct. Returns curl's exit code
# (0 on success). Reads globals: integration, dryrun, distinct, iuser,
# iheader, CURL_OPTS.
upload() {
    dir=$(realpath "$1")
    file=$(realpath "$2")
    url="$3"
    OPTS="$CURL_OPTS"
    if [ "$integration" = "y" ]; then
        collective=$(getCollective "$file" "$dir")
        trace "- upload: collective = $collective"
        url="$url/$collective"
        # -n: the original's bare [ $iuser ] broke for values with spaces
        if [ -n "$iuser" ]; then
            OPTS="$OPTS --user $iuser"
        fi
        if [ -n "$iheader" ]; then
            OPTS="$OPTS -H $iheader"
        fi
    fi
    if [ "$dryrun" = "y" ]; then
        info "- Not uploading (dry-run) $file to $url with opts $OPTS"
    else
        # Build the optional meta part as an array: the original passed a
        # literal empty "" argument to curl when -m was not given, which
        # curl treats as a malformed URL.
        local -a meta=()
        if [ "$distinct" = "y" ]; then
            meta=(-F 'meta={"multiple": false, "skipDuplicates": true}')
        fi
        trace "- Uploading $file to $url with options $OPTS"
        tf1=$($MKTEMP_CMD) tf2=$($MKTEMP_CMD) rc=0
        # Fix: capture curl's exit code. The original tested $? but never
        # assigned rc, so a failed upload was reported and returned as 0 —
        # and the caller would then happily delete the file.
        $CURL_CMD --fail -# -o "$tf1" --stderr "$tf2" $OPTS -XPOST \
            ${meta[@]+"${meta[@]}"} -F file=@"$file" "$url" || rc=$?
        if [ $rc -ne 0 ]; then
            info "Upload failed. Exit code: $rc"
            cat "$tf1"
            cat "$tf2"
            echo ""
            rm "$tf1" "$tf2"
            return $rc
        else
            # HTTP-level success can still carry an application error
            if grep -q '{"success":false' "$tf1"; then
                echo "Upload failed. Message from server:"
                cat "$tf1"
                echo ""
                rm "$tf1" "$tf2"
                return 1
            else
                info "- Upload done."
                rm "$tf1" "$tf2"
                return 0
            fi
        fi
    fi
}
|
||||
|
||||
# checksum FILE — print the sha256 hex digest of FILE (first field of
# the sha256sum output).
checksum() {
    $SHA256_CMD "$1" | awk '{print $1}'
}
|
||||
|
||||
# checkFile URL FILE DIR — ask the server's checkfile endpoint whether
# FILE's sha256 is already known. Prints "y" when it exists, "n" when it
# does not, and "failed" (returning 1) when the request itself failed.
# The upload url is rewritten to the corresponding checkfile url.
checkFile() {
    local url="$1"
    local file="$2"
    local dir="$3"
    OPTS="$CURL_OPTS"
    if [ "$integration" = "y" ]; then
        collective=$(getCollective "$file" "$dir")
        url="$url/$collective"
        url=$(echo "$url" | sed 's,/item/,/checkfile/,g')
        # -n: the original's bare [ $iuser ] broke for values with spaces
        if [ -n "$iuser" ]; then
            OPTS="$OPTS --user $iuser"
        fi
        if [ -n "$iheader" ]; then
            OPTS="$OPTS -H $iheader"
        fi
    else
        url=$(echo "$1" | sed 's,upload/item,checkfile,g')
    fi
    url=$url/$(checksum "$file")
    trace "- Check file via $OPTS: $url"
    tf1=$($MKTEMP_CMD) tf2=$($MKTEMP_CMD)
    # capture the exit code explicitly so errexit cannot interfere
    local rc=0
    $CURL_CMD --fail -v -o "$tf1" --stderr "$tf2" $OPTS -XGET -s "$url" || rc=$?
    if [ $rc -ne 0 ]; then
        info "Checking file failed!"
        cat "$tf1" >&2
        cat "$tf2" >&2
        info ""
        rm "$tf1" "$tf2"
        echo "failed"
        return 1
    else
        if grep -q '{"exists":true' "$tf1"; then
            rm "$tf1" "$tf2"
            echo "y"
        else
            rm "$tf1" "$tf2"
            echo "n"
        fi
    fi
}
|
||||
|
||||
# process FILE DIR — upload FILE to every configured url (skipping urls
# that already have it when --distinct is active), then optionally
# delete it. All curl failures are accumulated in curlrc; the file is
# deleted only when every url succeeded.
process() {
    file=$(realpath "$1")
    dir="$2"
    info "---- Processing $file ----------"
    declare -i curlrc=0
    # errexit off: individual check/upload failures must not abort the
    # whole watcher
    set +e
    for url in $urls; do
        if [ "$distinct" = "y" ]; then
            trace "- Checking if $file has been uploaded to $url already"
            res=$(checkFile "$url" "$file" "$dir")
            rc=$?
            curlrc=$(expr $curlrc + $rc)
            trace "- Result from checkfile: $res"
            if [ "$res" = "y" ]; then
                info "- Skipping file '$file' because it has been uploaded in the past."
                continue
            elif [ "$res" != "n" ]; then
                # anything but y/n means the check itself failed
                info "- Checking file failed, skipping the file."
                continue
            fi
        fi
        trace "- Uploading '$file' to '$url'."
        upload "$dir" "$file" "$url"
        rc=$?
        curlrc=$(expr $curlrc + $rc)
        if [ $rc -ne 0 ]; then
            trace "Upload to '$url' failed!"
        fi
    done
    set -e
    if [ $curlrc -ne 0 ]; then
        info "-> Some uploads failed."
    else
        trace "= File processed for all URLs"
        # only delete when no url reported a failure
        if [ "$delete" = "y" ]; then
            info "- Deleting file '$file'"
            set +e
            rm "$file"
            if [ $? -ne 0 ]; then
                info "- Deleting failed!"
            fi
            set -e
        fi
    fi
}
|
||||
|
||||
# findDir PATH — print every watch directory that PATH lies beneath
# (simple string-prefix match against each --path entry).
findDir() {
    path="$1"
    for d in "${watchdir[@]}"; do
        if [[ $path = ${d}* ]]; then
            echo $d
        fi
    done
}
|
||||
|
||||
# checkSetup — best-effort sanity check for integration mode: for every
# collective sub-directory of each watch dir, probe the integration
# endpoint and print a warning when it does not answer HTTP 200.
# No-op unless --integration was given.
checkSetup() {
    for dir in "${watchdir[@]}"; do
        find "$dir" -mindepth 1 -maxdepth 1 -type d -print0 | while IFS= read -d '' -r collective; do
            for url in $urls; do
                if [ "$integration" = "y" ]; then
                    # url is re-read from $urls on each loop iteration, so
                    # the per-collective suffix does not accumulate
                    url="$url/$(basename $collective)"
                    # only want the status code, body goes to /dev/null
                    OPTS="$CURL_OPTS -i -s -o /dev/null -w %{http_code}"
                    if [ $iuser ]; then
                        OPTS="$OPTS --user $iuser"
                    fi
                    if [ $iheader ]; then
                        OPTS="$OPTS -H $iheader"
                    fi
                    trace "Checking integration endpoint: $CURL_CMD $OPTS "$url""
                    status=$($CURL_CMD $OPTS "$url")
                    if [ "$status" != "200" ]; then
                        echo "[WARN] Collective '$(basename $collective)' failed the setup check."
                        echo "[WARN] $status response, command: $CURL_CMD $OPTS $url"
                    fi
                fi
            done
        done
    done
}
|
||||
|
||||
# runOnce — upload every (non-hidden) file currently present in the
# watch directories, honoring --recursive, --include and --exclude.
runOnce() {
    # show all watch dirs, not just the first array element
    info "Uploading all files (except hidden) in '${watchdir[*]}'."
    # Build find(1) arguments as arrays: the original expanded the
    # --include/--exclude patterns unquoted, so glob characters in them
    # could be expanded by the shell before find ever saw them.
    local -a md=() incl=() excl=()
    if [ "$recursive" != "y" ]; then
        md=(-maxdepth 1)
    fi
    if [ -n "$exclude" ]; then
        excl=(-not -name "$exclude")
    fi
    if [ -n "$include" ]; then
        incl=(-name "$include")
    fi
    for dir in "${watchdir[@]}"; do
        # NUL-delimited so paths with spaces/newlines survive
        find "$dir" ${md[@]+"${md[@]}"} -type f \
             ${incl[@]+"${incl[@]}"} ${excl[@]+"${excl[@]}"} \
             -not -name ".*" -print0 |
            while IFS= read -d '' -r file; do
                process "$file" "$dir"
            done
    done
}
|
||||
|
||||
# includeFile NAME — decide whether NAME should be uploaded.
# Fails (non-zero) when NAME does not match the --include glob, matches
# the --exclude glob, or is a hidden file (leading dot). Both globs are
# intentionally unquoted inside [[ ]] so they act as patterns.
includeFile() {
    file="$1"
    if [ -n "$include" ] && [[ $file != $include ]]; then
        trace "Skip $file due to include filter"
        return 1
    fi
    if [ -n "$exclude" ] && [[ $file == $exclude ]]; then
        trace "Skip $file due to exclude filter"
        return 1
    fi
    [[ "$file" != .* ]]
}
|
||||
|
||||
# warn if something seems not correctly configured
checkSetup

# Main dispatch: one-shot upload (--once), inotify watch (default), or
# polling loop (--poll <sec>).
if [ "$once" = "y" ]; then
    runOnce
else
    REC=""
    if [ "$recursive" = "y" ]; then
        REC="-r"
    fi
    if [ -z "$poll" ]; then
        # Watch mode: react to files being closed after a write or moved
        # into the directory.
        $INOTIFY_CMD $REC -m --format '%w%f' -e close_write -e moved_to "${watchdir[@]}" |
            while read pathfile; do
                if includeFile "$(basename "$pathfile")"; then
                    dir=$(findDir "$pathfile")
                    trace "The file '$pathfile' appeared below '$dir'"
                    # short grace period so the writer can finish
                    sleep 1
                    process "$(realpath "$pathfile")" "$dir"
                else
                    trace "Skip file $(realpath "$pathfile")"
                fi
            done
    else
        # Poll mode: re-scan everything every $poll seconds.
        echo "Running in polling mode: ${poll}s"
        while [ : ]
        do
            runOnce
            sleep $poll
        done
    fi
fi
|
@ -1,32 +0,0 @@
|
||||
#!/usr/bin/env bash
#
# Simple script to authenticate with docspell and trigger the "convert
# all pdf" route that submits a task to convert all pdf files using
# ocrmypdf.

set -e

CURL_CMD="curl"
JQ_CMD="jq"

# optional first argument: the server base url
BASE_URL="${1:-http://localhost:7880}"
LOGIN_URL="$BASE_URL/api/v1/open/auth/login"
TRIGGER_URL="$BASE_URL/api/v1/sec/item/convertallpdfs"

echo "Login to trigger converting all pdfs."
echo "Using url: $BASE_URL"
echo -n "Account: "
read -r USER      # -r: don't mangle backslashes in the account name
echo -n "Password: "
read -rs PASS     # -s: don't echo the password
echo

# Authenticate; the response is JSON carrying success flag and token.
auth=$("$CURL_CMD" --fail -XPOST --silent --data-binary "{\"account\":\"$USER\", \"password\":\"$PASS\"}" "$LOGIN_URL")

# printf preserves the JSON byte-for-byte (unquoted echo would not)
if [ "$(printf '%s' "$auth" | "$JQ_CMD" .success)" == "true" ]; then
    echo "Login successful"
    auth_token=$(printf '%s' "$auth" | "$JQ_CMD" -r .token)
    "$CURL_CMD" --fail -XPOST -H "X-Docspell-Auth: $auth_token" "$TRIGGER_URL"
else
    echo "Login failed."
fi
|
@ -1,213 +0,0 @@
|
||||
#!/usr/bin/env bash
#
# Script for downloading files (the PDF versions) flat in the current
# directory. It takes a search query for selecting what to download.
# Metadata is not downloaded, only the files.
#
# Usage:
#
# download-files.sh <docspell-base-url> <query>
#
# The docspell base url is required as well as a search query. The
# output directory is the current directory, and can be defined via
# env variable "TARGET_DIR".
#
# Example:
#
# download-files.sh http://localhost:7880 "tag:todo folder:work"
#
# The script then asks for username and password and starts
# downloading. For more details about the query, please see the docs
# here: https://docspell.org/docs/query/

CURL_CMD="curl"
JQ_CMD="jq"

# errout MSG… — all diagnostics go to stderr so stdout stays clean.
# Fix: defined BEFORE the argument checks below; the original called
# errout for a missing query before the function existed, producing
# "errout: command not found".
errout() {
    >&2 echo "$@"
}

if [ -z "$1" ]; then
    errout "The base-url to docspell is required."
    exit 1
else
    BASE_URL="$1"
    shift
fi

if [ -z "$1" ]; then
    errout "A search query is required"
    exit 1
else
    QUERY="$1"
    shift
fi

set -o errexit -o pipefail -o noclobber -o nounset

LOGIN_URL="$BASE_URL/api/v1/open/auth/login"
SEARCH_URL="$BASE_URL/api/v1/sec/item/search"
DETAIL_URL="$BASE_URL/api/v1/sec/item"
ATTACH_URL="$BASE_URL/api/v1/sec/attachment"

OVERWRITE_FILE=${OVERWRITE_FILE:-n}
TARGET=${TARGET_DIR:-"$(pwd)"}

# Clean up our temp files on any exit path.
# Fix: the original's ${TMPDIR-:/tmp} expanded to the literal ":/tmp"
# when TMPDIR was unset; ${TMPDIR:-/tmp} gives the intended default.
trap "{ rm -f ${TMPDIR:-/tmp}/ds-download.*; }" EXIT
|
||||
|
||||
# mcurl ARGS… — authenticated curl wrapper. Prints the response body on
# stdout; on failure dumps curl's progress/output to stderr and returns 2.
# Reads the global auth_token.
mcurl() {
    tmpfile1=$(mktemp -t "ds-download.XXXXX")
    tmpfile2=$(mktemp -t "ds-download.XXXXX")
    # errexit off so we can inspect curl's status ourselves
    set +e
    "$CURL_CMD" -# --fail --stderr "$tmpfile1" -o "$tmpfile2" -H "X-Docspell-Auth: $auth_token" "$@"
    status=$?
    set -e
    if [ $status -ne 0 ]; then
        errout "$CURL_CMD -H 'X-Docspell-Auth: …' $@"
        errout "curl command failed (rc=$status)! Output is below."
        cat "$tmpfile1" >&2
        cat "$tmpfile2" >&2
        rm -f "$tmpfile1" "$tmpfile2"
        return 2
    else
        # printf preserves the body byte-for-byte; the original's
        # unquoted `echo $ret` collapsed whitespace in the response
        ret=$(cat "$tmpfile2")
        rm "$tmpfile2" "$tmpfile1"
        printf '%s\n' "$ret"
    fi
}
|
||||
|
||||
|
||||
errout "Login to Docspell."
|
||||
errout "Using url: $BASE_URL"
|
||||
if [ -z "${DS_USER:-}" ]; then
|
||||
errout -n "Account: "
|
||||
read DS_USER
|
||||
fi
|
||||
if [ -z "${DS_PASS:-}" ]; then
|
||||
errout -n "Password: "
|
||||
read -s DS_PASS
|
||||
fi
|
||||
echo
|
||||
|
||||
declare auth
|
||||
declare auth_token
|
||||
declare auth_time
|
||||
|
||||
|
||||
# login — authenticate with DS_USER/DS_PASS and record the token and the
# login timestamp (globals auth, auth_token, auth_time). Exits on failure.
login() {
    auth=$("$CURL_CMD" -s --fail -XPOST \
          --data-binary "{\"account\":\"$DS_USER\", \"password\":\"$DS_PASS\"}" "$LOGIN_URL")

    # printf preserves the JSON byte-for-byte (unquoted echo would not)
    if [ "$(printf '%s' "$auth" | "$JQ_CMD" .success)" == "true" ]; then
        errout "Login successful"
        auth_token=$(printf '%s' "$auth" | "$JQ_CMD" -r .token)
        auth_time=$(date +%s)
    else
        errout "Login failed."
        exit 1
    fi
}
|
||||
|
||||
# checkLogin — re-login when the session token is about to expire.
# Compares the elapsed time since login (in milliseconds, plus a one
# second safety margin) against the token's validMs from the login
# response.
checkLogin() {
    elapsed=$((1000 * ($(date +%s) - $auth_time)))
    maxtime=$(echo $auth | "$JQ_CMD" .validMs)

    # 1s safety margin so we never use a token at the edge of expiry
    elapsed=$(($elapsed + 1000))
    if [ $elapsed -gt $maxtime ]; then
        errout "Need to re-login $elapsed > $maxtime"
        login
    fi
}
|
||||
|
||||
# listItems OFFSET LIMIT QUERY — run one page of the search and print
# one item id per line.
listItems() {
    OFFSET="${1:-0}"
    LIMIT="${2:-50}"
    QUERY="$3"
    errout "Get next items with offset=$OFFSET, limit=$LIMIT"
    REQ="{\"offset\":$OFFSET, \"limit\":$LIMIT, \"query\":\" $QUERY \"}"

    # Fix: the request header name was misspelled as "ContentType".
    mcurl -XPOST -H 'Content-Type: application/json' -d "$REQ" "$SEARCH_URL" | "$JQ_CMD" -r '.groups[].items[]|.id'
}


# fetchItem ID — print the full item detail JSON.
fetchItem() {
    mcurl -XGET "$DETAIL_URL/$1"
}
|
||||
|
||||
# downloadAttachment ID — download one attachment to $attachOut and
# verify its sha256 against the checksum the server sends in the ETag
# header (see the export-files script's notes on this behavior).
# NOTE(review): relies on $attachName/$attachOut being set by the caller
# (downloadItem) — confirm before reusing elsewhere.
downloadAttachment() {
    attachId="$1"
    errout " - Download '$attachName' ($attachId)"

    # Fix: default SKIP_FILE to "n" — the script runs under
    # `set -o nounset`, so the original crashed here whenever the target
    # file already existed and SKIP_FILE was not exported.
    if [ -f "$attachOut" ] && [ "${SKIP_FILE:-n}" == "y" ]; then
        errout " - Skipping file '$attachOut' since it already exists"
    else
        if [ -f "$attachOut" ] && [ "$OVERWRITE_FILE" == "y" ]; then
            errout " - Removing attachment file as requested: $attachOut"
            rm -f "$attachOut"
        fi

        DL_URL="$ATTACH_URL/$attachId"

        # server-side checksum from the ETag response header
        checksum1=$("$CURL_CMD" -s -I -H "X-Docspell-Auth: $auth_token" "$DL_URL" | \
                        grep -i 'etag' | cut -d' ' -f2 | xargs | tr -d '\r')
        "$CURL_CMD" -s -o "$attachOut" -H "X-Docspell-Auth: $auth_token" "$DL_URL"
        checksum2=$(sha256sum "$attachOut" | cut -d' ' -f1 | xargs)
        if [ "$checksum1" == "$checksum2" ]; then
            errout " - Checksum ok."
        else
            errout " - WARNING: Checksum mismatch! Server: $checksum1 Downloaded: $checksum2"
            return 3
        fi
    fi
}
|
||||
|
||||
# downloadItem ID — fetch an item's metadata and download each of its
# attachments into $TARGET.
downloadItem() {
    checkLogin
    itemData=$(fetchItem "$1")
    errout "Get item $(echo $itemData | "$JQ_CMD" -r .id)"
    # 'created' is epoch millis; the second assignment rounds it to
    # seconds. NOTE(review): the first assignment is immediately
    # overwritten — looks redundant; kept as-is.
    created=$(echo $itemData|"$JQ_CMD" '.created')
    created=$((($(echo $itemData|"$JQ_CMD" '.created') + 500) / 1000))
    itemId=$(echo $itemData | "$JQ_CMD" -r '.id')
    # flat layout: everything goes directly into $TARGET (the commented
    # line shows the per-month/per-item alternative)
    # out="$TARGET/$(date -d @$created +%Y-%m)/$itemId"
    out="$TARGET"

    if [ -d "$out" ] && [ "${DROP_ITEM:-}" == "y" ]; then
        errout "Removing item folder as requested: $out"
        rm -rf "$out"
    fi

    mkdir -p "$out"

    # attachments arrive as "<id> <name>" lines; downloadAttachment reads
    # attachName and attachOut from this scope
    while read attachId attachName; do
        attachOut="$out/$attachName"
        checkLogin
        downloadAttachment "$attachId"
    done < <(echo $itemData | "$JQ_CMD" -r '.attachments[] | [.id,.name] | join(" ")')
}
|
||||
|
||||
login

errout "Downloading files…"

# Page through all search results until a page comes back short of the
# limit, downloading every item on the way.
allCounter=0 innerCounter=0 limit=100 offset=0 done=n

while [ "$done" = "n" ]; do
    checkLogin

    innerCounter=0
    while read id; do
        downloadItem "$id"
        innerCounter=$(($innerCounter + 1))
    done < <(listItems $offset $limit "$QUERY")

    allCounter=$(($allCounter + $innerCounter))
    offset=$(($offset + $limit))

    # a page smaller than the limit means we reached the end
    if [ $innerCounter -lt $limit ]; then
        done=y
    fi

done
errout "Downloaded $allCounter items"
|
199
tools/ds.sh
199
tools/ds.sh
@ -1,199 +0,0 @@
|
||||
#!/usr/bin/env bash

# A simple bash script that reads a configuration file to know where
# to upload a given file.
#
# The config file contains anonymous upload urls to docspell. All
# files given to this script are uploaded to all those urls.
#
# The default location for the config file is
# `~/.config/docspell/ds.conf'.
#
# The config file must contain lines of the form:
#
# url.1=http://localhost:7880/api/v1/open/upload/item/<source-id>
# url.2=...
#
# Lines starting with a `#' are ignored.
#
# The `-e|--exists' option allows to skip uploading and only check
# whether a given file exists in docspell.

# saner programming env: these switches turn some bugs into errors
set -o errexit -o pipefail -o noclobber -o nounset

# External tool names, collected here so they are easy to override.
CURL_CMD="curl"
GREP_CMD="grep"
MKTEMP_CMD="mktemp"
SHA256_CMD="sha256sum"

# util-linux "enhanced" getopt exits with 4 on --test.
! getopt --test > /dev/null
if [[ ${PIPESTATUS[0]} -ne 4 ]]; then
    echo 'I’m sorry, `getopt --test` failed in this environment.'
    exit 1
fi

# NOTE(review): the short option 's' (--skip) is declared here but has
# no handler in the case statement below — confirm whether intended.
OPTIONS=c:hsde
LONGOPTS=config:,help,skip,delete,exists,allow-duplicates

! PARSED=$(getopt --options=$OPTIONS --longoptions=$LONGOPTS --name "$0" -- "$@")
if [[ ${PIPESTATUS[0]} -ne 0 ]]; then
    # e.g. return value is 1
    # then getopt has complained about wrong arguments to stdout
    exit 2
fi

# read getopt’s output this way to handle the quoting right:
eval set -- "$PARSED"

# Option defaults; the config lives under $XDG_CONFIG_HOME (~/.config).
exists=n delete=n help=n config="${XDG_CONFIG_HOME:-$HOME/.config}/docspell/ds.conf" dupes=n
|
||||
# Consume the normalized options until the "--" separator; everything
# after it is the list of files to process.
while true; do
    case "$1" in
        -h|--help)
            help=y
            shift
            ;;
        -c|--config)
            config="$2"
            shift 2
            ;;
        -d|--delete)
            delete="y"
            shift
            ;;
        -e|--exists)
            exists=y
            shift
            ;;
        --allow-duplicates)
            dupes=y
            shift
            ;;
        --)
            shift
            break
            ;;
        *)
            # unreachable for input validated by getopt (but note the
            # unhandled '-s' declared in OPTIONS above)
            echo "Programming error"
            exit 3
            ;;
    esac
done
|
||||
|
||||
|
||||
# info MSG — print MSG on stdout.
info() {
    printf '%s\n' "$1"
}

# checksum FILE — print the sha256 hex digest of FILE (first
# whitespace-delimited field of the sha256sum output).
checksum() {
    local line
    line=$($SHA256_CMD "$1")
    echo "${line%% *}"
}
|
||||
|
||||
# checkFile URL FILE — derive the checkfile endpoint from the upload URL
# and test whether FILE's sha256 is already known to the server.
# Succeeds (0) when the server reports {"exists":true}.
checkFile() {
    # Split declaration from assignment: `local url=$(…)` masks the
    # command substitution's exit status.
    local url file
    url=$(echo "$1" | sed 's,upload/item,checkfile,g')
    file="$2"
    # grep -q: same exit semantics as the original's output-discarding
    # redirection dance, but clearer
    $CURL_CMD -XGET -s "$url/$(checksum "$file")" | grep -q '"exists":true'
}
|
||||
|
||||
# upload_file FILE URL — POST FILE to URL as multipart form data.
# Success requires both an HTTP 200 status and {"success":true} in the
# response body; rc accumulates the two grep results, so 0 means ok.
upload_file() {
    tf=$($MKTEMP_CMD) rc=0
    # the meta part controls server-side duplicate handling
    META1=""
    META2=""
    if [ "$dupes" = "y" ]; then
        META1="-F"
        META2="meta={\"multiple\": false, \"skipDuplicates\": false}"
    else
        META1="-F"
        META2="meta={\"multiple\": false, \"skipDuplicates\": true}"
    fi
    # `(2>&1 1>/dev/null grep …)` discards grep's stdout but keeps its
    # exit status, which is folded into rc below
    $CURL_CMD -# -o "$tf" --stderr "$tf" -w "%{http_code}" -XPOST $META1 "$META2" -F file=@"$1" "$2" | (2>&1 1>/dev/null grep 200)
    rc=$(expr $rc + $?)
    cat $tf | (2>&1 1>/dev/null grep '{"success":true')
    rc=$(expr $rc + $?)
    if [ $rc -ne 0 ]; then
        info "Upload failed. Exit code: $rc"
        cat "$tf"
        echo ""
        rm "$tf"
        return $rc
    else
        rm "$tf"
        return 0
    fi
}
|
||||
|
||||
# upload FILE URL — upload FILE, skipping it when the server already
# knows it (unless --allow-duplicates). Returns upload_file's status.
upload() {
    if [ "$dupes" == "y" ]; then
        upload_file "$1" "$2"
    else
        # Fix: run checkFile inside the if-condition. The original called
        # it as a bare statement and then tested $?; under
        # `set -o errexit` a "file not found" result would abort the whole
        # script instead of falling through to the upload.
        if checkFile "$2" "$1"; then
            info "File already exists at url $2"
            return 0
        else
            upload_file "$1" "$2"
        fi
    fi
}
|
||||
|
||||
# showUsage — print the help text, including current option values,
# to stdout.
showUsage() {
    info "Upload files to docspell"
    info ""
    info "Usage: $0 [options] file [file ...]"
    info ""
    info "Options:"
    info " -c | --config Provide a config file. (value: $config)"
    info " -d | --delete Delete the files when successfully uploaded (value: $delete)"
    info " -h | --help Prints this help text. (value: $help)"
    info " -e | --exists Checks for the existence of a file instead of uploading (value: $exists)"
    info " --allow-duplicates Do not skip existing files in docspell (value: $dupes)"
    info ""
    info "Arguments:"
    info " One or more files to check for existence or upload."
    info ""
}
|
||||
|
||||
# Help requested: print usage and stop.
if [ "$help" = "y" ]; then
    showUsage
    exit 0
fi

# handle non-option arguments
if [[ $# -eq 0 ]]; then
    echo "$0: No files given."
    exit 4
fi
|
||||
|
||||
|
||||
## Read the config file
# Collect upload urls from lines of the form "url.N=<value>"; comment
# lines (starting with '#') are filtered out first.
declare -a urls
while IFS="=" read -r k v
do
    if [[ $k == url* ]]; then
        # xargs trims surrounding whitespace from the value; the element
        # is quoted so glob characters in a url are never expanded
        urls+=("$(echo "$v" | xargs)")
    fi
done <<< "$($GREP_CMD -v '^#.*' "$config")"
|
||||
|
||||
|
||||
## Main
# For every file and every configured url: either report existence
# (--exists) or upload, optionally deleting the file afterwards.
for file in "$@"; do
    for url in "${urls[@]}"; do
        if [ "$exists" = "y" ]; then
            if checkFile "$url" "$file"; then
                info "$url $file: true"
            else
                info "$url $file: false"
            fi
        else
            info "Uploading '$file' to '$url'"
            # Fix: capture the upload result immediately. The original
            # read $? only after `set -e` and after another [ ] test, so
            # it was always 0 and files were deleted even when the upload
            # had failed.
            set +e
            upload "$file" "$url"
            rc=$?
            set -e
            if [ "$delete" = "y" ] && [ $rc -eq 0 ]; then
                info "Deleting file: $file"
                rm -f "$file"
            fi
        fi
    done
done
|
@ -1,256 +0,0 @@
|
||||
#!/usr/bin/env bash
|
||||
#
|
||||
# Simple script for downloading all your files. It goes through all
|
||||
# items visible to the logged in user and downloads the attachments
|
||||
# (the original files).
|
||||
#
|
||||
# The item's metadata are stored next to the files to provide more
|
||||
# information about the item: tags, dates, custom fields etc. This
|
||||
# contains most of your user supplied data.
|
||||
#
|
||||
# This script is intended for having your data outside and independent
|
||||
# of docspell. Another good idea for a backup strategy is to take
|
||||
# database dumps *and* storing the releases of docspell next to this
|
||||
# dump.
|
||||
#
|
||||
# Usage:
|
||||
#
|
||||
# export-files.sh <docspell-base-url> <target-directory>
|
||||
#
|
||||
# The docspell base url is required as well as a directory to store
|
||||
# all the files into.
|
||||
#
|
||||
# Example:
|
||||
#
|
||||
# export-files.sh http://localhost:7880 /tmp/ds-download
|
||||
#
|
||||
# The script then asks for username and password and starts
|
||||
# downloading. Files are downloaded into the following structure
|
||||
# (below the given target directory):
|
||||
#
|
||||
# - yyyy-mm (item date)
|
||||
# - A3…XY (item id)
|
||||
# - somefile.pdf (attachments with name)
|
||||
# - metadata.json (json file with items metadata)
|
||||
#
|
||||
# By default, files are not overwritten, it stops if existing files
|
||||
# are encountered. Configuration can be specified using environment
|
||||
# variables:
|
||||
#
|
||||
# - OVERWRITE_FILE= if `y` then overwriting existing files is ok.
|
||||
# - SKIP_FILE= if `y` then existing files are skipped (supersedes
|
||||
# OVERWRITE_FILE).
|
||||
# - DROP_ITEM= if `y` the item folder is removed before attempting to
|
||||
# download it. If this is set to `y` then the above options don't
|
||||
# make sense, since they operate on the files inside the item folder
|
||||
#
|
||||
# Docspell sends with each file its sha256 checksum via the ETag
|
||||
# header. This is used to do a integrity check after downloading.
|
||||
|
||||
CURL_CMD="curl"
JQ_CMD="jq"

# First argument: the docspell base url.
if [ -z "${1:-}" ]; then
  echo "The base-url to docspell is required."
  exit 1
fi
BASE_URL="$1"
shift

# Second argument: directory that receives the exported files.
if [ -z "${1:-}" ]; then
  echo "A directory is required to store the files into."
  exit 1
fi
TARGET="$1"
shift

set -o errexit -o pipefail -o noclobber -o nounset
|
||||
|
||||
# API endpoints derived from the base url.
LOGIN_URL="$BASE_URL/api/v1/open/auth/login"
SEARCH_URL="$BASE_URL/api/v1/sec/item/search"
INSIGHT_URL="$BASE_URL/api/v1/sec/collective/insights"
DETAIL_URL="$BASE_URL/api/v1/sec/item"
ATTACH_URL="$BASE_URL/api/v1/sec/attachment"

# Behavior toggles (see header comment). Default them all to 'n' so the
# script works under 'set -o nounset' when they are not exported.
OVERWRITE_FILE=${OVERWRITE_FILE:-n}
DROP_ITEM=${DROP_ITEM:-n}
# SKIP_FILE previously had no default and crashed with
# 'unbound variable' under nounset whenever it was not exported.
SKIP_FILE=${SKIP_FILE:-n}
|
||||
|
||||
# Print diagnostics to stderr so stdout stays clean for data (several
# functions return values via stdout).
errout() {
  >&2 echo "$@"
}

# Remove mcurl's temp files on any exit.
# Fixed: '${TMPDIR-:/tmp}' was a typo for '${TMPDIR:-/tmp}' — when TMPDIR
# was unset it expanded to the bogus path ':/tmp' and cleaned up nothing.
trap "{ rm -f ${TMPDIR:-/tmp}/ds-export.*; }" EXIT
|
||||
|
||||
# Authenticated curl wrapper: runs curl with the session token, buffers
# curl's stderr and the response body in temp files, and prints the body
# on success. On curl failure both buffers are dumped to stderr and 2 is
# returned.
mcurl() {
  tmpfile1=$(mktemp -t "ds-export.XXXXX")
  tmpfile2=$(mktemp -t "ds-export.XXXXX")
  set +e
  "$CURL_CMD" -# --fail --stderr "$tmpfile1" -o "$tmpfile2" -H "X-Docspell-Auth: $auth_token" "$@"
  status=$?
  set -e
  if [ $status -ne 0 ]; then
    errout "$CURL_CMD -H 'X-Docspell-Auth: …' $@"
    errout "curl command failed (rc=$status)! Output is below."
    cat "$tmpfile1" >&2
    cat "$tmpfile2" >&2
    rm -f "$tmpfile1" "$tmpfile2"
    return 2
  else
    ret=$(cat "$tmpfile2")
    rm "$tmpfile2" "$tmpfile1"
    # Quote the expansion: the unquoted 'echo $ret' word-split the
    # response and could glob json content against the filesystem.
    echo "$ret"
  fi
}
|
||||
|
||||
|
||||
errout "Login to Docspell."
errout "Using url: $BASE_URL"
# Prompt for credentials unless supplied via the DS_USER/DS_PASS
# environment variables. '-r' keeps backslashes in the input literal
# (plain 'read' silently mangled passwords containing backslashes).
if [ -z "${DS_USER:-}" ]; then
  errout -n "Account: "
  read -r DS_USER
fi
if [ -z "${DS_PASS:-}" ]; then
  errout -n "Password: "
  read -r -s DS_PASS
fi
echo

declare auth        # raw json response of the login endpoint
declare auth_token  # session token sent with every api request
declare auth_time   # epoch seconds of the last successful login
|
||||
|
||||
|
||||
# Authenticate against docspell. On success the session token is stored
# in auth_token and the login time (epoch seconds) in auth_time; on
# failure the script terminates.
login() {
  local payload="{\"account\":\"$DS_USER\", \"password\":\"$DS_PASS\"}"
  auth=$("$CURL_CMD" -s --fail -XPOST --data-binary "$payload" "$LOGIN_URL")

  if [ "$(echo $auth | "$JQ_CMD" .success)" == "true" ]; then
    errout "Login successful"
    auth_token=$(echo $auth | "$JQ_CMD" -r .token)
    auth_time=$(date +%s)
  else
    errout "Login failed."
    exit 1
  fi
}
|
||||
|
||||
# Re-login when the session token is (almost) expired: compares the
# elapsed milliseconds since login, plus a one second safety margin,
# against the validity period the server reported at login time.
checkLogin() {
  # elapsed milliseconds since the last login, plus the 1s margin
  elapsed=$(( ($(date +%s) - auth_time) * 1000 + 1000 ))
  # token validity in milliseconds, taken from the login response
  maxtime=$(echo $auth | "$JQ_CMD" .validMs)
  if [ "$elapsed" -gt "$maxtime" ]; then
    errout "Need to re-login $elapsed > $maxtime"
    login
  fi
}
|
||||
|
||||
# Print the ids of one page of search results, one id per line.
# $1 - offset (default 0), $2 - page size (default 50).
listItems() {
  OFFSET="${1:-0}"
  LIMIT="${2:-50}"
  errout "Get next items with offset=$OFFSET, limit=$LIMIT"
  printf -v REQ '{"offset":%s, "limit":%s, "withDetails":true, "query":""}' "$OFFSET" "$LIMIT"
  mcurl -XPOST -H 'ContentType: application/json' -d "$REQ" "$SEARCH_URL" \
    | "$JQ_CMD" -r '.groups[].items[]|.id'
}
|
||||
|
||||
# Print the total number of items in the collective
# (incoming + outgoing, from the insights endpoint).
fetchItemCount() {
  local insights
  insights=$(mcurl -XGET "$INSIGHT_URL") || return
  echo $insights | "$JQ_CMD" '[.incomingCount, .outgoingCount] | add'
}
|
||||
|
||||
# Print the full item detail (json) for item id $1.
fetchItem() {
  local item_id="$1"
  mcurl -XGET "$DETAIL_URL/$item_id"
}
|
||||
|
||||
# Download the original file of one attachment into $attachOut.
# $1 - attachment id. The caller provides $attachName (display name) and
# $attachOut (target path) as globals. Honors SKIP_FILE/OVERWRITE_FILE
# and verifies the download against the sha256 checksum that docspell
# sends in the ETag header.
downloadAttachment() {
  attachId="$1"
  errout " - Download '$attachName' ($attachId)"

  # Default SKIP_FILE to 'n': the unguarded expansion crashed with
  # 'unbound variable' under 'set -o nounset' when it was not exported.
  if [ -f "$attachOut" ] && [ "${SKIP_FILE:-n}" == "y" ]; then
    errout " - Skipping file '$attachOut' since it already exists"
  else
    if [ -f "$attachOut" ] && [ "$OVERWRITE_FILE" == "y" ]; then
      errout " - Removing attachment file as requested: $attachOut"
      rm -f "$attachOut"
    fi

    # The server sends the file's sha256 checksum as the ETag header.
    checksum1=$("$CURL_CMD" -s -I -H "X-Docspell-Auth: $auth_token" "$ATTACH_URL/$attachId/original" | \
                    grep -i 'etag' | cut -d':' -f2 | xargs | tr -d '\r')
    "$CURL_CMD" -s -o "$attachOut" -H "X-Docspell-Auth: $auth_token" "$ATTACH_URL/$attachId/original"
    checksum2=$(sha256sum "$attachOut" | cut -d' ' -f1 | xargs)
    if [ "$checksum1" == "$checksum2" ]; then
      errout " - Checksum ok."
    else
      errout " - WARNING: Checksum mismatch! Server: $checksum1 Downloaded: $checksum2"
      return 3
    fi
  fi
}
|
||||
|
||||
# Download one item: writes metadata.json plus all original attachment
# files into $TARGET/<yyyy-mm>/<item-id>/.
# $1 - the item id.
downloadItem() {
  checkLogin
  itemData=$(fetchItem "$1")
  errout "Get item $(echo "$itemData" | "$JQ_CMD" -r .id)"
  # item date: milliseconds rounded to epoch seconds
  # (a dead duplicate 'created=' assignment was removed here)
  created=$((($(echo "$itemData" | "$JQ_CMD" '.created') + 500) / 1000))
  itemId=$(echo "$itemData" | "$JQ_CMD" -r '.id')
  out="$TARGET/$(date -d @$created +%Y-%m)/$itemId"

  if [ -d "$out" ] && [ "$DROP_ITEM" == "y" ]; then
    errout "Removing item folder as requested: $out"
    rm -rf "$out"
  fi

  mkdir -p "$out"
  # Default SKIP_FILE to 'n' so an unexported variable does not trip
  # 'set -o nounset'.
  if [ -f "$out/metadata.json" ] && [ "${SKIP_FILE:-n}" == "y" ]; then
    errout " - Skipping file 'metadata.json' since it already exists"
  else
    if [ -f "$out/metadata.json" ] && [ "$OVERWRITE_FILE" == "y" ]; then
      errout " - Removing metadata.json as requested"
      rm -f "$out/metadata.json"
    fi
    # Quote the expansion so the json is written verbatim; the unquoted
    # form word-split the data and could glob against the filesystem.
    echo "$itemData" > "$out/metadata.json"
  fi
  while read attachId attachName; do
    attachOut="$out/$attachName"
    checkLogin
    downloadAttachment "$attachId"
  done < <(echo "$itemData" | "$JQ_CMD" -r '.sources[] | [.id,.name] | join(" ")')
}
|
||||
|
||||
login

allCount=$(fetchItemCount)
errout "Downloading $allCount items…"

allCounter=0 innerCounter=0 limit=100 offset=0 done=n

# Page through all items until a page comes back smaller than the limit.
while [ "$done" = "n" ]; do
  checkLogin

  innerCounter=0
  while read id; do
    downloadItem "$id"
    innerCounter=$(($innerCounter + 1))
  done < <(listItems $offset $limit)

  allCounter=$(($allCounter + $innerCounter))
  offset=$(($offset + $limit))

  if [ $innerCounter -lt $limit ]; then
    done=y
  fi
done

errout "Downloaded $allCounter/$allCount items"
# Numeric comparison: the previous '[[ $allCounter < $allCount ]]'
# compared lexicographically and e.g. reported 100 as less than 99.
if [ "$allCounter" -lt "$allCount" ]; then
  errout
  errout " Downloaded less items than were reported as available. This"
  errout " may be due to items in folders that you cannot see. Or it"
  errout " may be a bug."
  errout
fi
|
@ -1,22 +0,0 @@
|
||||
#!/usr/bin/env bash
#
# This script submits a job to regenerate all preview images. This may
# be necessary if you change the dpi setting that affects the size of
# the preview.

set -e

CURL_CMD="curl"

BASE_URL="${1:-http://localhost:7880}"
TRIGGER_URL="$BASE_URL/api/v1/admin/attachments/generatePreviews"

echo "Login to trigger regenerating preview images."
echo "Using url: $BASE_URL"
# Read the admin secret silently so it does not echo to the terminal;
# -r keeps backslashes in the secret literal.
echo -n "Admin Secret: "
read -r -s ADMIN_SECRET
echo

# Use the configured curl binary — the hard-coded 'curl' call ignored
# the CURL_CMD variable defined above. (An unused JQ_CMD was removed.)
"$CURL_CMD" --fail -XPOST -H "Docspell-Admin-Secret: $ADMIN_SECRET" "$TRIGGER_URL"
|
@ -1,49 +0,0 @@
|
||||
#!/usr/bin/env bash
#
# A script to reset a password.
#
# Usage:
# ./reset-password.sh <baseurl> <admin-secret> <account>
#
# Example:
# ./reset-password.sh http://localhost:7880 test123 your/account
#

CURL_CMD="curl"
JQ_CMD="jq"

if [ -z "$1" ]; then
  echo "The docspell base-url is required as first argument."
  exit 1
else
  BASE_URL="$1"
fi

if [ -z "$2" ]; then
  echo "The admin secret is required as second argument."
  exit 1
else
  SECRET="$2"
fi

if [ -z "$3" ]; then
  echo "The user account is required as third argument."
  exit 1
else
  USER="$3"
fi

RESET_URL="${BASE_URL}/api/v1/admin/user/resetPassword"

OUT=$("$CURL_CMD" -s -XPOST \
  -H "Docspell-Admin-Secret: $SECRET" \
  -H "Content-Type: application/json" \
  -d "{\"account\": \"$USER\"}" \
  "$RESET_URL")

# Pretty-print through jq when available. Quote $OUT so the json
# response is printed verbatim; the unquoted 'echo $OUT' word-split it
# and could glob against the filesystem.
if command -v "$JQ_CMD" > /dev/null; then
  echo "$OUT" | "$JQ_CMD"
else
  echo "$OUT"
fi
|
Reference in New Issue
Block a user