From 35882fce8417caf5a6ac894c06af7c006e860a02 Mon Sep 17 00:00:00 2001 From: eikek Date: Tue, 5 Jul 2022 21:15:53 +0200 Subject: [PATCH 01/15] Refactor collective table to use artificial primary key --- .../migration/h2/V1.39.0__collective_id.sql | 210 ++++++++++++++++ .../mariadb/V1.39.0__collective_id.sql | 232 ++++++++++++++++++ .../postgresql/V1.39.0__collective_id.sql | 215 ++++++++++++++++ 3 files changed, 657 insertions(+) create mode 100644 modules/store/src/main/resources/db/migration/h2/V1.39.0__collective_id.sql create mode 100644 modules/store/src/main/resources/db/migration/mariadb/V1.39.0__collective_id.sql create mode 100644 modules/store/src/main/resources/db/migration/postgresql/V1.39.0__collective_id.sql diff --git a/modules/store/src/main/resources/db/migration/h2/V1.39.0__collective_id.sql b/modules/store/src/main/resources/db/migration/h2/V1.39.0__collective_id.sql new file mode 100644 index 00000000..0443335b --- /dev/null +++ b/modules/store/src/main/resources/db/migration/h2/V1.39.0__collective_id.sql @@ -0,0 +1,210 @@ +-- add new id column +alter table "collective" add column "id" bigserial not null; +create unique index "collective_id_idx" on "collective"("id"); + +-- change references: source +alter table "source" add column "coll_id" bigint not null default 0; +update "source" t set "coll_id" = (select id from collective where "cid" = t."cid"); +create index "source_coll_id_idx" on "source"("coll_id"); +alter table "source" add constraint "source_coll_id_fkey" foreign key ("coll_id") references "collective"("id"); +alter table "source" drop constraint "CONSTRAINT_CA"; +alter table "source" drop column "cid"; +alter table "source" alter column "coll_id" drop default; + +-- change references: tag +alter table "tag" add column "coll_id" bigint not null default 0; +update "tag" t set "coll_id" = (select id from collective where "cid" = t."cid"); +create index "tag_coll_id_idx" on "tag"("coll_id"); +create unique index "tag_coll_id_name_idx" 
on "tag"("coll_id", "name"); +alter table "tag" add constraint "tag_coll_id_name_key" unique("coll_id", "name"); +alter table "tag" add constraint "tag_coll_id_fkey" foreign key ("coll_id") references "collective"("id"); +alter table "tag" drop constraint "CONSTRAINT_1BF"; +alter table "tag" drop column "cid"; +alter table "tag" alter column "coll_id" drop default; + +-- change references: user_ +alter table "user_" add column "coll_id" bigint not null default 0; +update "user_" t set "coll_id" = (select id from collective where "cid" = t."cid"); +create index "user__coll_id_idx" on "user_"("coll_id"); +create unique index "user__coll_id_login_idx" on "user_"("coll_id", "login"); +alter table "user_" add constraint "user__coll_id_login_key" unique("coll_id", "login"); +alter table "user_" add constraint "user__coll_id_fkey" foreign key ("coll_id") references "collective"("id"); +alter table "user_" drop constraint "CONSTRAINT_6A"; +alter table "user_" drop column "cid"; +alter table "user_" alter column "coll_id" drop default; + +-- change references: query_bookmark +alter table "query_bookmark" add column "coll_id" bigint not null default 0; +update "query_bookmark" t set "coll_id" = (select id from collective where "cid" = t."cid"); +create index "query_bookmark_coll_id_idx" on "query_bookmark"("coll_id"); +create unique index "query_bookmark_coll_id__user_id_name_idx" on "query_bookmark"("coll_id", "__user_id", "name"); +alter table "query_bookmark" add constraint "query_bookmark_coll_id__user_id_name_key" unique("coll_id", "__user_id", "name"); +alter table "query_bookmark" add constraint "query_bookmark_coll_id_fkey" foreign key ("coll_id") references "collective"("id"); +alter table "query_bookmark" drop constraint "CONSTRAINT_FF591"; +alter table "query_bookmark" drop column "cid"; +alter table "query_bookmark" alter column "coll_id" drop default; + +-- change references: person +alter table "person" add column "coll_id" bigint not null default 0; +update 
"person" t set "coll_id" = (select id from collective where "cid" = t."cid"); +create index "person_coll_id_idx" on "person"("coll_id"); +create unique index "person_coll_id_name_idx" on "person"("coll_id", "name"); +alter table "person" add constraint "person_coll_id_name_key" unique("coll_id", "name"); +alter table "person" add constraint "person_coll_id_fkey" foreign key ("coll_id") references "collective"("id"); +alter table "person" drop constraint "CONSTRAINT_C4E"; +alter table "person" drop column "cid"; +alter table "person" alter column "coll_id" drop default; + +-- change references: organization +alter table "organization" add column "coll_id" bigint not null default 0; +update "organization" t set "coll_id" = (select id from collective where "cid" = t."cid"); +create index "organization_coll_id_idx" on "organization"("coll_id"); +create unique index "organization_coll_id_name_idx" on "organization"("coll_id", "name"); +alter table "organization" add constraint "organization_coll_id_name_key" unique("coll_id", "name"); +alter table "organization" add constraint "organization_coll_id_fkey" foreign key ("coll_id") references "collective"("id"); +alter table "organization" drop constraint "CONSTRAINT_464"; +alter table "organization" drop column "cid"; +alter table "organization" alter column "coll_id" drop default; + +-- change references: item_link +alter table "item_link" add column "coll_id" bigint not null default 0; +update "item_link" t set "coll_id" = (select id from collective where "cid" = t."cid"); +create index "item_link_coll_id_idx" on "item_link"("coll_id"); +create unique index "item_link_coll_id_item1_item2_idx" on "item_link"("coll_id", "item1", "item2"); +alter table "item_link" add constraint "item_link_coll_id_item1_item2_key" unique("coll_id", "item1", "item2"); +alter table "item_link" add constraint "item_link_coll_id_fkey" foreign key ("coll_id") references "collective"("id"); +alter table "item_link" drop constraint 
"CONSTRAINT_805"; +alter table "item_link" drop column "cid"; +alter table "item_link" alter column "coll_id" drop default; + +-- change references: item +alter table "item" add column "coll_id" bigint not null default 0; +update "item" t set "coll_id" = (select id from collective where "cid" = t."cid"); +create index "item_coll_id_idx" on "item"("coll_id"); +alter table "item" add constraint "item_coll_id_fkey" foreign key ("coll_id") references "collective"("id"); +alter table "item" drop column "cid"; +alter table "item" alter column "coll_id" drop default; + +-- change references: folder +alter table "folder" add column "coll_id" bigint not null default 0; +update "folder" t set "coll_id" = (select id from collective where "cid" = t."cid"); +create index "folder_coll_id_idx" on "folder"("coll_id"); +create unique index "folder_coll_id_name_idx" on "folder"("coll_id", "name"); +alter table "folder" add constraint "folder_coll_id_name_key" unique("coll_id", "name"); +alter table "folder" add constraint "folder_coll_id_fkey" foreign key ("coll_id") references "collective"("id"); +alter table "folder" drop constraint "CONSTRAINT_B45"; +alter table "folder" drop column "cid"; +alter table "folder" alter column "coll_id" drop default; + +-- change references: equipment +alter table "equipment" add column "coll_id" bigint not null default 0; +update "equipment" t set "coll_id" = (select id from collective where "cid" = t."cid"); +create index "equipment_coll_id_idx" on "equipment"("coll_id"); +create unique index "equipment_coll_id_name_idx" on "equipment"("coll_id", "name"); +alter table "equipment" add constraint "equipment_coll_id_name_key" unique("coll_id", "name"); +alter table "equipment" add constraint "equipment_coll_id_fkey" foreign key ("coll_id") references "collective"("id"); +alter table "equipment" drop constraint "CONSTRAINT_402"; +alter table "equipment" drop constraint "equipment_cid_name_key"; +alter table "equipment" drop column "cid"; +alter table 
"equipment" alter column "coll_id" drop default; + +-- change references: empty_trash_setting +alter table "empty_trash_setting" add column "coll_id" bigint not null default 0; +update "empty_trash_setting" t set "coll_id" = (select id from collective where "cid" = t."cid"); +create index "empty_trash_setting_coll_id_idx" on "empty_trash_setting"("coll_id"); +alter table "empty_trash_setting" add constraint "empty_trash_setting_coll_id_fkey" +foreign key ("coll_id") references "collective"("id"); +alter table "empty_trash_setting" drop column "cid"; +alter table "empty_trash_setting" alter column "coll_id" drop default; +alter table "empty_trash_setting" add primary key(coll_id); + +-- change references: download_query +alter table "download_query" add column "coll_id" bigint not null default 0; +update "download_query" t set "coll_id" = (select id from collective where "cid" = t."cid"); +create index "download_query_coll_id_idx" on "download_query"("coll_id"); +alter table "download_query" add constraint "download_query_coll_id_fkey" +foreign key ("coll_id") references "collective"("id"); +alter table "download_query" drop column "cid"; +alter table "download_query" alter column "coll_id" drop default; + +-- change references: custom_field +alter table "custom_field" add column "coll_id" bigint not null default 0; +update "custom_field" t set "coll_id" = (select id from collective where "cid" = t."cid"); +create index "custom_field_coll_id_idx" on "custom_field"("coll_id"); +create unique index "custom_field_coll_id_name_idx" on "custom_field"("coll_id", "name"); +alter table "custom_field" add constraint "custom_field_coll_id_name_key" unique("coll_id", "name"); +alter table "custom_field" add constraint "custom_field_coll_id_fkey" foreign key ("coll_id") references "collective"("id"); +alter table "custom_field" drop constraint "CONSTRAINT_2ACD"; +alter table "custom_field" drop column "cid"; +alter table "custom_field" alter column "coll_id" drop default; + +-- 
change references: collective_password +alter table "collective_password" add column "coll_id" bigint not null default 0; +update "collective_password" t set "coll_id" = (select id from collective where "cid" = t."cid"); +create index "collective_password_coll_id_idx" on "collective_password"("coll_id"); +alter table "collective_password" add constraint "collective_password_coll_id_fkey" +foreign key ("coll_id") references "collective"("id"); +alter table "collective_password" drop column "cid"; +alter table "collective_password" alter column "coll_id" drop default; + +-- change references: client_settings_collective +alter table "client_settings_collective" add column "coll_id" bigint not null default 0; +update "client_settings_collective" t set "coll_id" = (select id from collective where "cid" = t."cid"); +create index "client_settings_collective_coll_id_idx" on "client_settings_collective"("coll_id"); +create unique index "client_settings_collective_coll_id_client_id_idx" on "client_settings_collective"("coll_id", "client_id"); +alter table "client_settings_collective" add constraint "client_settings_collective_coll_id_name_key" unique("coll_id", "client_id"); +alter table "client_settings_collective" add constraint "client_settings_collective_coll_id_fkey" foreign key ("coll_id") references "collective"("id"); +alter table "client_settings_collective" drop constraint "CONSTRAINT_7682"; +alter table "client_settings_collective" drop column "cid"; +alter table "client_settings_collective" alter column "coll_id" drop default; + +-- change references: classifier_setting +alter table "classifier_setting" add column "coll_id" bigint not null default 0; +update "classifier_setting" t set "coll_id" = (select id from collective where "cid" = t."cid"); +create index "classifier_setting_coll_id_idx" on "classifier_setting"("coll_id"); +alter table "classifier_setting" add constraint "classifier_setting_coll_id_fkey" + foreign key ("coll_id") references 
"collective"("id"); +alter table "classifier_setting" drop column "cid"; +alter table "classifier_setting" alter column "coll_id" drop default; + +-- change references: classifier_model +alter table "classifier_model" add column "coll_id" bigint not null default 0; +update "classifier_model" t set "coll_id" = (select id from collective where "cid" = t."cid"); +create index "classifier_model_coll_id_idx" on "classifier_model"("coll_id"); +create unique index "classifier_model_coll_id_name_idx" on "classifier_model"("coll_id", "name"); +alter table "classifier_model" add constraint "classifier_model_coll_id_name_key" unique("coll_id", "name"); +alter table "classifier_model" add constraint "classifier_model_coll_id_fkey" foreign key ("coll_id") references "collective"("id"); +alter table "classifier_model" drop constraint "CONSTRAINT_BC7B9"; +alter table "classifier_model" drop column "cid"; +alter table "classifier_model" alter column "coll_id" drop default; + +-- change references: addon_run_config +alter table "addon_run_config" add column "coll_id" bigint not null default 0; +update "addon_run_config" t set "coll_id" = (select id from collective where "cid" = t."cid"); +create index "addon_run_config_coll_id_idx" on "addon_run_config"("coll_id"); +alter table "addon_run_config" add constraint "addon_run_config_coll_id_fkey" + foreign key ("coll_id") references "collective"("id"); +alter table "addon_run_config" drop column "cid"; +alter table "addon_run_config" alter column "coll_id" drop default; + +-- change references: addon_archive +alter table "addon_archive" add column "coll_id" bigint not null default 0; +update "addon_archive" t set "coll_id" = (select id from collective where "cid" = t."cid"); +create index "addon_archive_coll_id_idx" on "addon_archive"("coll_id"); +create unique index "addon_archive_coll_id_name_version_idx" on "addon_archive"("coll_id", "name", "version"); +create unique index "addon_archive_coll_id_original_url_idx" on 
"addon_archive"("coll_id", "original_url"); +alter table "addon_archive" add constraint "addon_archive_coll_id_name_version_key" unique("coll_id", "name", "version"); +alter table "addon_archive" add constraint "addon_archive_coll_id_original_url_key" unique("coll_id", "original_url"); +alter table "addon_archive" add constraint "addon_archive_coll_id_fkey" foreign key ("coll_id") references "collective"("id"); +alter table "addon_archive" drop constraint "CONSTRAINT_8B9829"; +alter table "addon_archive" drop constraint "CONSTRAINT_8B9829C"; +alter table "addon_archive" drop column "cid"; +alter table "addon_archive" alter column "coll_id" drop default; + + +-- change primary key +alter table "collective" drop primary key; +alter table "collective" add constraint "collective_id_pkey" primary key ("id"); +alter table "collective" rename column "cid" to "name"; +create unique index "collective_name_idx" on "collective"("name"); +alter table "collective" add constraint "collective_name_key" unique("name"); diff --git a/modules/store/src/main/resources/db/migration/mariadb/V1.39.0__collective_id.sql b/modules/store/src/main/resources/db/migration/mariadb/V1.39.0__collective_id.sql new file mode 100644 index 00000000..aa6616ea --- /dev/null +++ b/modules/store/src/main/resources/db/migration/mariadb/V1.39.0__collective_id.sql @@ -0,0 +1,232 @@ +-- add new id column +alter table `collective` add column `id` int auto_increment not null unique; +create unique index `collective_id_idx` on `collective`(`id`); + +-- change references: source +alter table `source` add column `coll_id` int not null default 0; +update `source` t set `coll_id` = (select id from collective where `cid` = t.`cid`); +alter table `source` drop constraint `source_ibfk_1`; +alter table `source` drop constraint `cid`; +create index `source_coll_id_idx` on `source`(`coll_id`); +alter table `source` add constraint `source_coll_id_fkey` foreign key (`coll_id`) references `collective`(`id`); +alter table 
`source` drop index `source_cid_idx`; +alter table `source` drop column `cid`; +alter table `source` alter column `coll_id` drop default; + +-- change references: tag +alter table `tag` add column `coll_id` int not null default 0; +update `tag` t set `coll_id` = (select id from collective where `cid` = t.`cid`); +create index `tag_coll_id_idx` on `tag`(`coll_id`); +create unique index `tag_coll_id_name_idx` on `tag`(`coll_id`, `name`); +alter table `tag` add constraint `tag_coll_id_name_key` unique(`coll_id`, `name`); +alter table `tag` add constraint `tag_coll_id_fkey` foreign key (`coll_id`) references `collective`(`id`); +alter table `tag` drop constraint `tag_ibfk_1`; +alter table `tag` drop constraint `cid`; +alter table `tag` drop column `cid`; +alter table `tag` alter column `coll_id` drop default; + +-- change references: user_ +alter table `user_` add column `coll_id` int not null default 0; +update `user_` t set `coll_id` = (select id from collective where `cid` = t.`cid`); +create index `user__coll_id_idx` on `user_`(`coll_id`); +create unique index `user__coll_id_login_idx` on `user_`(`coll_id`, `login`); +alter table `user_` add constraint `user__coll_id_login_key` unique(`coll_id`, `login`); +alter table `user_` add constraint `user__coll_id_fkey` foreign key (`coll_id`) references `collective`(`id`); +alter table `user_` drop constraint `user__ibfk_1`; +alter table `user_` drop constraint `cid`; +alter table `user_` drop column `cid`; +alter table `user_` alter column `coll_id` drop default; + +-- change references: query_bookmark +alter table `query_bookmark` add column `coll_id` int not null default 0; +update `query_bookmark` t set `coll_id` = (select id from collective where `cid` = t.`cid`); +create index `query_bookmark_coll_id_idx` on `query_bookmark`(`coll_id`); +create unique index `query_bookmark_coll_id__user_id_name_idx` on `query_bookmark`(`coll_id`, `__user_id`, `name`); +alter table `query_bookmark` add constraint 
`query_bookmark_coll_id__user_id_name_key` unique(`coll_id`, `__user_id`, `name`); +alter table `query_bookmark` add constraint `query_bookmark_coll_id_fkey` foreign key (`coll_id`) references `collective`(`id`); +alter table `query_bookmark` drop constraint `query_bookmark_ibfk_2`; +alter table `query_bookmark` drop constraint `cid`; +alter table `query_bookmark` drop column `cid`; +alter table `query_bookmark` alter column `coll_id` drop default; + +-- change references: person +alter table `person` add column `coll_id` int not null default 0; +update `person` t set `coll_id` = (select id from collective where `cid` = t.`cid`); +create index `person_coll_id_idx` on `person`(`coll_id`); +create unique index `person_coll_id_name_idx` on `person`(`coll_id`, `name`); +alter table `person` add constraint `person_coll_id_name_key` unique(`coll_id`, `name`); +alter table `person` add constraint `person_coll_id_fkey` foreign key (`coll_id`) references `collective`(`id`); +alter table `person` drop constraint `person_ibfk_1`; +alter table `person` drop constraint `cid`; +alter table `person` drop column `cid`; +alter table `person` alter column `coll_id` drop default; + +-- change references: organization +alter table `organization` add column `coll_id` int not null default 0; +update `organization` t set `coll_id` = (select id from collective where `cid` = t.`cid`); +create index `organization_coll_id_idx` on `organization`(`coll_id`); +create unique index `organization_coll_id_name_idx` on `organization`(`coll_id`, `name`); +alter table `organization` add constraint `organization_coll_id_name_key` unique(`coll_id`, `name`); +alter table `organization` add constraint `organization_coll_id_fkey` foreign key (`coll_id`) references `collective`(`id`); +alter table `organization` drop constraint `cid`; +alter table `organization` drop constraint `organization_ibfk_1`; +alter table `organization` drop column `cid`; +alter table `organization` alter column `coll_id` drop 
default; + +-- change references: item_link +alter table `item_link` add column `coll_id` int not null default 0; +update `item_link` t set `coll_id` = (select id from collective where `cid` = t.`cid`); +create index `item_link_coll_id_idx` on `item_link`(`coll_id`); +create unique index `item_link_coll_id_item1_item2_idx` on `item_link`(`coll_id`, `item1`, `item2`); +alter table `item_link` add constraint `item_link_coll_id_item1_item2_key` unique(`coll_id`, `item1`, `item2`); +alter table `item_link` add constraint `item_link_coll_id_fkey` foreign key (`coll_id`) references `collective`(`id`); +alter table `item_link` drop constraint `item_link_ibfk_1`; +alter table `item_link` drop constraint `cid`; +alter table `item_link` drop column `cid`; +alter table `item_link` alter column `coll_id` drop default; + +-- change references: item +alter table `item` add column `coll_id` int not null default 0; +update `item` t set `coll_id` = (select id from collective where `cid` = t.`cid`); +create index `item_coll_id_idx` on `item`(`coll_id`); +alter table `item` add constraint `item_coll_id_fkey` foreign key (`coll_id`) references `collective`(`id`); +alter table `item` drop constraint `item_ibfk_6`; +alter table `item` drop column `cid`; +alter table `item` alter column `coll_id` drop default; + +-- change references: folder +alter table `folder` add column `coll_id` int not null default 0; +update `folder` t set `coll_id` = (select id from collective where `cid` = t.`cid`); +alter table `folder` drop constraint `folder_ibfk_1`; +alter table `folder` drop constraint `name`; +create index `folder_coll_id_idx` on `folder`(`coll_id`); +create unique index `folder_coll_id_name_idx` on `folder`(`coll_id`, `name`); +alter table `folder` add constraint `folder_coll_id_name_key` unique(`coll_id`, `name`); +alter table `folder` add constraint `folder_coll_id_fkey` foreign key (`coll_id`) references `collective`(`id`); +alter table `folder` drop column `cid`; +alter table `folder` 
alter column `coll_id` drop default; + +-- change references: equipment +alter table `equipment` add column `coll_id` int not null default 0; +update `equipment` t set `coll_id` = (select id from collective where `cid` = t.`cid`); +create index `equipment_coll_id_idx` on `equipment`(`coll_id`); +create unique index `equipment_coll_id_name_idx` on `equipment`(`coll_id`, `name`); +alter table `equipment` add constraint `equipment_coll_id_name_key` unique(`coll_id`, `name`); +alter table `equipment` add constraint `equipment_coll_id_fkey` foreign key (`coll_id`) references `collective`(`id`); +alter table `equipment` drop constraint `equipment_ibfk_1`; +alter table `equipment` drop constraint `equipment_cid_name_key`; +alter table `equipment` drop constraint `cid`; +alter table `equipment` drop column `cid`; +alter table `equipment` alter column `coll_id` drop default; + +-- change references: empty_trash_setting +alter table `empty_trash_setting` add column `coll_id` int not null default 0; +update `empty_trash_setting` t set `coll_id` = (select id from collective where `cid` = t.`cid`); +create index `empty_trash_setting_coll_id_idx` on `empty_trash_setting`(`coll_id`); +alter table `empty_trash_setting` add constraint `empty_trash_setting_coll_id_fkey` +foreign key (`coll_id`) references `collective`(`id`); +alter table `empty_trash_setting` drop constraint `empty_trash_setting_ibfk_1`; +alter table `empty_trash_setting` drop column `cid`; +alter table `empty_trash_setting` alter column `coll_id` drop default; +alter table `empty_trash_setting` add primary key(coll_id); + +-- change references: download_query +alter table `download_query` add column `coll_id` int not null default 0; +update `download_query` t set `coll_id` = (select id from collective where `cid` = t.`cid`); +create index `download_query_coll_id_idx` on `download_query`(`coll_id`); +alter table `download_query` add constraint `download_query_coll_id_fkey` +foreign key (`coll_id`) references 
`collective`(`id`); +alter table `download_query` drop constraint `download_query_ibfk_1`; +alter table `download_query` drop column `cid`; +alter table `download_query` alter column `coll_id` drop default; + +-- change references: custom_field +alter table `custom_field` add column `coll_id` int not null default 0; +update `custom_field` t set `coll_id` = (select id from collective where `cid` = t.`cid`); +create index `custom_field_coll_id_idx` on `custom_field`(`coll_id`); +create unique index `custom_field_coll_id_name_idx` on `custom_field`(`coll_id`, `name`); +alter table `custom_field` add constraint `custom_field_coll_id_name_key` unique(`coll_id`, `name`); +alter table `custom_field` add constraint `custom_field_coll_id_fkey` foreign key (`coll_id`) references `collective`(`id`); +alter table `custom_field` drop constraint `custom_field_ibfk_1`; +alter table `custom_field` drop constraint `cid`; +alter table `custom_field` drop column `cid`; +alter table `custom_field` alter column `coll_id` drop default; + +-- change references: collective_password +alter table `collective_password` add column `coll_id` int not null default 0; +update `collective_password` t set `coll_id` = (select id from collective where `cid` = t.`cid`); +create index `collective_password_coll_id_idx` on `collective_password`(`coll_id`); +alter table `collective_password` add constraint `collective_password_coll_id_fkey` +foreign key (`coll_id`) references `collective`(`id`); +alter table `collective_password` drop constraint `collective_password_ibfk_1`; +alter table `collective_password` drop column `cid`; +alter table `collective_password` alter column `coll_id` drop default; + +-- change references: client_settings_collective +alter table `client_settings_collective` add column `coll_id` int not null default 0; +update `client_settings_collective` t set `coll_id` = (select id from collective where `cid` = t.`cid`); +create index `client_settings_collective_coll_id_idx` on 
`client_settings_collective`(`coll_id`); +create unique index `client_settings_collective_coll_id_client_id_idx` on `client_settings_collective`(`coll_id`, `client_id`); +alter table `client_settings_collective` add constraint `client_settings_collective_coll_id_name_key` unique(`coll_id`, `client_id`); +alter table `client_settings_collective` add constraint `client_settings_collective_coll_id_fkey` foreign key (`coll_id`) references `collective`(`id`); +alter table `client_settings_collective` drop constraint `client_settings_collective_ibfk_1`; +alter table `client_settings_collective` drop constraint `client_id`; +alter table `client_settings_collective` drop column `cid`; +alter table `client_settings_collective` alter column `coll_id` drop default; + +-- change references: classifier_setting +alter table `classifier_setting` add column `coll_id` int not null default 0; +update `classifier_setting` t set `coll_id` = (select id from collective where `cid` = t.`cid`); +create index `classifier_setting_coll_id_idx` on `classifier_setting`(`coll_id`); +alter table `classifier_setting` add constraint `classifier_setting_coll_id_fkey` + foreign key (`coll_id`) references `collective`(`id`); +alter table `classifier_setting` drop constraint `classifier_setting_ibfk_1`; +alter table `classifier_setting` drop column `cid`; +alter table `classifier_setting` alter column `coll_id` drop default; + +-- change references: classifier_model +alter table `classifier_model` add column `coll_id` int not null default 0; +update `classifier_model` t set `coll_id` = (select id from collective where `cid` = t.`cid`); +create index `classifier_model_coll_id_idx` on `classifier_model`(`coll_id`); +create unique index `classifier_model_coll_id_name_idx` on `classifier_model`(`coll_id`, `name`); +alter table `classifier_model` add constraint `classifier_model_coll_id_name_key` unique(`coll_id`, `name`); +alter table `classifier_model` add constraint `classifier_model_coll_id_fkey` 
foreign key (`coll_id`) references `collective`(`id`); +alter table `classifier_model` drop constraint `classifier_model_ibfk_1`; +alter table `classifier_model` drop constraint `cid`; +alter table `classifier_model` drop column `cid`; +alter table `classifier_model` alter column `coll_id` drop default; + +-- change references: addon_run_config +alter table `addon_run_config` add column `coll_id` int not null default 0; +update `addon_run_config` t set `coll_id` = (select id from collective where `cid` = t.`cid`); +create index `addon_run_config_coll_id_idx` on `addon_run_config`(`coll_id`); +alter table `addon_run_config` add constraint `addon_run_config_coll_id_fkey` + foreign key (`coll_id`) references `collective`(`id`); +alter table `addon_run_config` drop constraint `addon_run_config_ibfk_1`; +alter table `addon_run_config` drop column `cid`; +alter table `addon_run_config` alter column `coll_id` drop default; + +-- change references: addon_archive +alter table `addon_archive` add column `coll_id` int not null default 0; +update `addon_archive` t set `coll_id` = (select id from collective where `cid` = t.`cid`); +alter table `addon_archive` drop index `cid`; +alter table `addon_archive` drop index `cid_2`; + +alter table `addon_archive` drop constraint `addon_archive_ibfk_1`; +alter table `addon_archive` drop index `addon_archive_cid_idx`; + +create index `addon_archive_coll_id_idx` on `addon_archive`(`coll_id`); +create unique index `addon_archive_coll_id_name_version_idx` on `addon_archive`(`coll_id`, `name`, `version`); +create unique index `addon_archive_coll_id_original_url_idx` on `addon_archive`(`coll_id`, `original_url`); +alter table `addon_archive` add constraint `addon_archive_coll_id_name_version_key` unique(`coll_id`, `name`, `version`); +alter table `addon_archive` add constraint `addon_archive_coll_id_original_url_key` unique(`coll_id`, `original_url`); +alter table `addon_archive` add constraint `addon_archive_coll_id_fkey` foreign key 
(`coll_id`) references `collective`(`id`); +alter table `addon_archive` drop column `cid`; +alter table `addon_archive` alter column `coll_id` drop default; + +-- change primary key +alter table `collective` drop primary key; +alter table `collective` add constraint `collective_id_pkey` primary key (`id`); +alter table `collective` rename column `cid` to `name`; +create unique index `collective_name_idx` on `collective`(`name`); +alter table `collective` add constraint `collective_name_key` unique(`name`); diff --git a/modules/store/src/main/resources/db/migration/postgresql/V1.39.0__collective_id.sql b/modules/store/src/main/resources/db/migration/postgresql/V1.39.0__collective_id.sql new file mode 100644 index 00000000..6cc6fe15 --- /dev/null +++ b/modules/store/src/main/resources/db/migration/postgresql/V1.39.0__collective_id.sql @@ -0,0 +1,215 @@ +-- add new id column +alter table "collective" add column "id" bigserial not null; +create unique index "collective_id_idx" on "collective"("id"); + +-- change references: source +alter table "source" add column "coll_id" bigint not null default 0; +update "source" t set "coll_id" = (select id from collective where "cid" = t."cid"); +create index "source_coll_id_idx" on "source"("coll_id"); +alter table "source" add constraint "source_coll_id_fkey" foreign key ("coll_id") references "collective"("id"); +alter table "source" drop constraint "source_cid_fkey"; +alter table "source" drop column "cid"; +alter table "source" alter column "coll_id" drop default; + +-- change references: tag +alter table "tag" add column "coll_id" bigint not null default 0; +update "tag" t set "coll_id" = (select id from collective where "cid" = t."cid"); +create index "tag_coll_id_idx" on "tag"("coll_id"); +create unique index "tag_coll_id_name_idx" on "tag"("coll_id", "name"); +alter table "tag" add constraint "tag_coll_id_name_key" unique using index "tag_coll_id_name_idx"; +alter table "tag" add constraint "tag_coll_id_fkey" foreign key 
("coll_id") references "collective"("id"); +alter table "tag" drop constraint "tag_cid_fkey"; +alter table "tag" drop column "cid"; +alter table "tag" alter column "coll_id" drop default; + +-- change references: user_ +alter table "user_" add column "coll_id" bigint not null default 0; +update "user_" t set "coll_id" = (select id from collective where "cid" = t."cid"); +create index "user__coll_id_idx" on "user_"("coll_id"); +create unique index "user__coll_id_login_idx" on "user_"("coll_id", "login"); +alter table "user_" add constraint "user__coll_id_login_key" unique using index "user__coll_id_login_idx"; +alter table "user_" add constraint "user__coll_id_fkey" foreign key ("coll_id") references "collective"("id"); +alter table "user_" drop constraint "user__cid_fkey"; +alter table "user_" drop column "cid"; +alter table "user_" alter column "coll_id" drop default; + +-- change references: query_bookmark +alter table "query_bookmark" add column "coll_id" bigint not null default 0; +update "query_bookmark" t set "coll_id" = (select id from collective where "cid" = t."cid"); +create index "query_bookmark_coll_id_idx" on "query_bookmark"("coll_id"); +create unique index "query_bookmark_coll_id__user_id_name_idx" on "query_bookmark"("coll_id", "__user_id", "name"); +alter table "query_bookmark" add constraint "query_bookmark_coll_id__user_id_name_key" unique using index "query_bookmark_coll_id__user_id_name_idx"; +alter table "query_bookmark" add constraint "query_bookmark_coll_id_fkey" foreign key ("coll_id") references "collective"("id"); +alter table "query_bookmark" drop constraint "query_bookmark_cid_fkey"; +alter table "query_bookmark" drop column "cid"; +alter table "query_bookmark" alter column "coll_id" drop default; + +-- change references: person +alter table "person" add column "coll_id" bigint not null default 0; +update "person" t set "coll_id" = (select id from collective where "cid" = t."cid"); +create index "person_coll_id_idx" on 
"person"("coll_id"); +create unique index "person_coll_id_name_idx" on "person"("coll_id", "name"); +alter table "person" add constraint "person_coll_id_name_key" unique using index "person_coll_id_name_idx"; +alter table "person" add constraint "person_coll_id_fkey" foreign key ("coll_id") references "collective"("id"); +alter table "person" drop constraint "person_cid_fkey"; +alter table "person" drop column "cid"; +alter table "person" alter column "coll_id" drop default; + +-- change references: organization +alter table "organization" add column "coll_id" bigint not null default 0; +update "organization" t set "coll_id" = (select id from collective where "cid" = t."cid"); +create index "organization_coll_id_idx" on "organization"("coll_id"); +create unique index "organization_coll_id_name_idx" on "organization"("coll_id", "name"); +alter table "organization" add constraint "organization_coll_id_name_key" unique using index "organization_coll_id_name_idx"; +alter table "organization" add constraint "organization_coll_id_fkey" foreign key ("coll_id") references "collective"("id"); +alter table "organization" drop constraint "organization_cid_fkey"; +alter table "organization" drop column "cid"; +alter table "organization" alter column "coll_id" drop default; + +-- change references: item_link +alter table "item_link" add column "coll_id" bigint not null default 0; +update "item_link" t set "coll_id" = (select id from collective where "cid" = t."cid"); +create index "item_link_coll_id_idx" on "item_link"("coll_id"); +create unique index "item_link_coll_id_item1_item2_idx" on "item_link"("coll_id", "item1", "item2"); +alter table "item_link" add constraint "item_link_coll_id_item1_item2_key" unique using index "item_link_coll_id_item1_item2_idx"; +alter table "item_link" add constraint "item_link_coll_id_fkey" foreign key ("coll_id") references "collective"("id"); +alter table "item_link" drop constraint "item_link_cid_fkey"; +alter table "item_link" drop column 
"cid"; +alter table "item_link" alter column "coll_id" drop default; + +-- change references: item +alter table "item" add column "coll_id" bigint not null default 0; +update "item" t set "coll_id" = (select id from collective where "cid" = t."cid"); +create index "item_coll_id_idx" on "item"("coll_id"); +alter table "item" add constraint "item_coll_id_fkey" foreign key ("coll_id") references "collective"("id"); +alter table "item" drop constraint "item_cid_fkey"; +alter table "item" drop column "cid"; +alter table "item" alter column "coll_id" drop default; + +-- change references: folder +alter table "folder" add column "coll_id" bigint not null default 0; +update "folder" t set "coll_id" = (select id from collective where "cid" = t."cid"); +create index "folder_coll_id_idx" on "folder"("coll_id"); +create unique index "folder_coll_id_name_idx" on "folder"("coll_id", "name"); +alter table "folder" add constraint "folder_coll_id_name_key" unique using index "folder_coll_id_name_idx"; +alter table "folder" add constraint "folder_coll_id_fkey" foreign key ("coll_id") references "collective"("id"); +alter table "folder" drop constraint "folder_cid_fkey"; +alter table "folder" drop column "cid"; +alter table "folder" alter column "coll_id" drop default; + +-- change references: equipment +alter table "equipment" add column "coll_id" bigint not null default 0; +update "equipment" t set "coll_id" = (select id from collective where "cid" = t."cid"); +create index "equipment_coll_id_idx" on "equipment"("coll_id"); +create unique index "equipment_coll_id_name_idx" on "equipment"("coll_id", "name"); +alter table "equipment" add constraint "equipment_coll_id_name_key" unique using index "equipment_coll_id_name_idx"; +alter table "equipment" add constraint "equipment_coll_id_fkey" foreign key ("coll_id") references "collective"("id"); +alter table "equipment" drop constraint "equipment_cid_fkey"; +alter table "equipment" drop constraint "equipment_cid_eid_key"; +alter table 
"equipment" drop column "cid"; +alter table "equipment" alter column "coll_id" drop default; + +-- change references: empty_trash_setting +alter table "empty_trash_setting" add column "coll_id" bigint not null default 0; +update "empty_trash_setting" t set "coll_id" = (select id from collective where "cid" = t."cid"); +create index "empty_trash_setting_coll_id_idx" on "empty_trash_setting"("coll_id"); +alter table "empty_trash_setting" add constraint "empty_trash_setting_coll_id_fkey" +foreign key ("coll_id") references "collective"("id"); +alter table "empty_trash_setting" drop constraint "empty_trash_setting_cid_fkey"; +alter table "empty_trash_setting" drop column "cid"; +alter table "empty_trash_setting" alter column "coll_id" drop default; +alter table "empty_trash_setting" add primary key(coll_id); + +-- change references: download_query +alter table "download_query" add column "coll_id" bigint not null default 0; +update "download_query" t set "coll_id" = (select id from collective where "cid" = t."cid"); +create index "download_query_coll_id_idx" on "download_query"("coll_id"); +alter table "download_query" add constraint "download_query_coll_id_fkey" +foreign key ("coll_id") references "collective"("id"); +alter table "download_query" drop constraint "download_query_cid_fkey"; +alter table "download_query" drop column "cid"; +alter table "download_query" alter column "coll_id" drop default; + +-- change references: custom_field +alter table "custom_field" add column "coll_id" bigint not null default 0; +update "custom_field" t set "coll_id" = (select id from collective where "cid" = t."cid"); +create index "custom_field_coll_id_idx" on "custom_field"("coll_id"); +create unique index "custom_field_coll_id_name_idx" on "custom_field"("coll_id", "name"); +alter table "custom_field" add constraint "custom_field_coll_id_name_key" unique using index "custom_field_coll_id_name_idx"; +alter table "custom_field" add constraint "custom_field_coll_id_fkey" foreign 
key ("coll_id") references "collective"("id"); +alter table "custom_field" drop constraint "custom_field_cid_fkey"; +alter table "custom_field" drop column "cid"; +alter table "custom_field" alter column "coll_id" drop default; + +-- change references: collective_password +alter table "collective_password" add column "coll_id" bigint not null default 0; +update "collective_password" t set "coll_id" = (select id from collective where "cid" = t."cid"); +create index "collective_password_coll_id_idx" on "collective_password"("coll_id"); +alter table "collective_password" add constraint "collective_password_coll_id_fkey" +foreign key ("coll_id") references "collective"("id"); +alter table "collective_password" drop constraint "collective_password_cid_fkey"; +alter table "collective_password" drop column "cid"; +alter table "collective_password" alter column "coll_id" drop default; + +-- change references: client_settings_collective +alter table "client_settings_collective" add column "coll_id" bigint not null default 0; +update "client_settings_collective" t set "coll_id" = (select id from collective where "cid" = t."cid"); +create index "client_settings_collective_coll_id_idx" on "client_settings_collective"("coll_id"); +create unique index "client_settings_collective_coll_id_client_id_idx" on "client_settings_collective"("coll_id", "client_id"); +alter table "client_settings_collective" add constraint "client_settings_collective_coll_id_name_key" unique using index "client_settings_collective_coll_id_client_id_idx"; +alter table "client_settings_collective" add constraint "client_settings_collective_coll_id_fkey" foreign key ("coll_id") references "collective"("id"); +alter table "client_settings_collective" drop constraint "client_settings_collective_cid_fkey"; +alter table "client_settings_collective" drop column "cid"; +alter table "client_settings_collective" alter column "coll_id" drop default; + +-- change references: classifier_setting +alter table 
"classifier_setting" add column "coll_id" bigint not null default 0; +update "classifier_setting" t set "coll_id" = (select id from collective where "cid" = t."cid"); +create index "classifier_setting_coll_id_idx" on "classifier_setting"("coll_id"); +alter table "classifier_setting" add constraint "classifier_setting_coll_id_fkey" + foreign key ("coll_id") references "collective"("id"); +alter table "classifier_setting" drop constraint "classifier_setting_cid_fkey"; +alter table "classifier_setting" drop column "cid"; +alter table "classifier_setting" alter column "coll_id" drop default; + +-- change references: classifier_model +alter table "classifier_model" add column "coll_id" bigint not null default 0; +update "classifier_model" t set "coll_id" = (select id from collective where "cid" = t."cid"); +create index "classifier_model_coll_id_idx" on "classifier_model"("coll_id"); +create unique index "classifier_model_coll_id_name_idx" on "classifier_model"("coll_id", "name"); +alter table "classifier_model" add constraint "classifier_model_coll_id_name_key" unique using index "classifier_model_coll_id_name_idx"; +alter table "classifier_model" add constraint "classifier_model_coll_id_fkey" foreign key ("coll_id") references "collective"("id"); +alter table "classifier_model" drop constraint "classifier_model_cid_fkey"; +alter table "classifier_model" drop column "cid"; +alter table "classifier_model" alter column "coll_id" drop default; + +-- change references: addon_run_config +alter table "addon_run_config" add column "coll_id" bigint not null default 0; +update "addon_run_config" t set "coll_id" = (select id from collective where "cid" = t."cid"); +create index "addon_run_config_coll_id_idx" on "addon_run_config"("coll_id"); +alter table "addon_run_config" add constraint "addon_run_config_coll_id_fkey" + foreign key ("coll_id") references "collective"("id"); +alter table "addon_run_config" drop constraint "addon_run_config_cid_fkey"; +alter table 
"addon_run_config" drop column "cid"; +alter table "addon_run_config" alter column "coll_id" drop default; + +-- change references: addon_archive +alter table "addon_archive" add column "coll_id" bigint not null default 0; +update "addon_archive" t set "coll_id" = (select id from collective where "cid" = t."cid"); +create index "addon_archive_coll_id_idx" on "addon_archive"("coll_id"); +create unique index "addon_archive_coll_id_name_version_idx" on "addon_archive"("coll_id", "name", "version"); +create unique index "addon_archive_coll_id_original_url_idx" on "addon_archive"("coll_id", "original_url"); +alter table "addon_archive" add constraint "addon_archive_coll_id_name_version_key" unique using index "addon_archive_coll_id_name_version_idx"; +alter table "addon_archive" add constraint "addon_archive_coll_id_original_url_key" unique using index "addon_archive_coll_id_original_url_idx"; +alter table "addon_archive" add constraint "addon_archive_coll_id_fkey" foreign key ("coll_id") references "collective"("id"); +alter table "addon_archive" drop constraint "addon_archive_cid_fkey"; +alter table "addon_archive" drop column "cid"; +alter table "addon_archive" alter column "coll_id" drop default; + + +-- change primary key +alter table "collective" drop constraint "collective_pkey"; +alter table "collective" add constraint "collective_id_pkey" primary key ("id"); +alter table "collective" rename column "cid" to "name"; +create unique index "collective_name_idx" on "collective"("name"); +alter table "collective" add constraint "collective_name_key" unique using index "collective_name_idx"; From 77f22bb5ea9d46262cf421d7e09b1b103423f455 Mon Sep 17 00:00:00 2001 From: eikek Date: Tue, 5 Jul 2022 21:17:18 +0200 Subject: [PATCH 02/15] Adopt store module to new collective table --- .../docspell/backend/auth/AuthToken.scala | 9 +- .../scala/docspell/backend/auth/Login.scala | 37 ++++---- .../backend/ops/OQueryBookmarks.scala | 58 ++++++------ 
.../scala/docspell/backend/ops/OTotp.scala | 93 +++++++++---------- .../scala/docspell/common/AccountInfo.scala | 48 ++++++++++ .../scala/docspell/common/CollectiveId.scala | 26 ++++++ .../docspell/notification/api/Event.scala | 40 ++++---- .../notification/api/EventContext.scala | 2 +- .../restserver/routes/TotpRoutes.scala | 2 +- .../scala/db/migration/MigrationTasks.scala | 10 +- .../docspell/store/impl/DoobieMeta.scala | 3 + .../qb/generator/ItemQueryGenerator.scala | 8 +- .../docspell/store/queries/ItemData.scala | 2 +- .../docspell/store/queries/QAttachment.scala | 16 ++-- .../docspell/store/queries/QCollective.scala | 8 +- .../docspell/store/queries/QCustomField.scala | 10 +- .../docspell/store/queries/QFolder.scala | 74 ++++++++------- .../scala/docspell/store/queries/QItem.scala | 76 +++++++++------ .../scala/docspell/store/queries/QLogin.scala | 15 ++- .../scala/docspell/store/queries/QMails.scala | 13 ++- .../store/queries/QNotification.scala | 4 +- .../store/queries/QOrganization.scala | 22 ++--- .../scala/docspell/store/queries/QUser.scala | 25 ++--- .../scala/docspell/store/queries/Query.scala | 4 +- .../store/records/AddonRunConfigData.scala | 8 +- .../records/AddonRunConfigResolved.scala | 4 +- .../store/records/RAddonArchive.scala | 25 ++--- .../store/records/RAddonRunConfig.scala | 10 +- .../docspell/store/records/RAttachment.scala | 20 ++-- .../store/records/RAttachmentArchive.scala | 4 +- .../store/records/RAttachmentPreview.scala | 4 +- .../store/records/RAttachmentSource.scala | 2 +- .../store/records/RClassifierModel.scala | 17 ++-- .../store/records/RClassifierSetting.scala | 15 ++- .../records/RClientSettingsCollective.scala | 15 +-- .../docspell/store/records/RCollective.scala | 37 +++++--- .../store/records/RCollectivePassword.scala | 12 +-- .../docspell/store/records/RCustomField.scala | 18 ++-- .../store/records/RDownloadQuery.scala | 4 +- .../store/records/REmptyTrashSetting.scala | 10 +- .../docspell/store/records/REquipment.scala | 
12 +-- .../docspell/store/records/RFolder.scala | 18 ++-- .../scala/docspell/store/records/RItem.scala | 73 +++++++++------ .../docspell/store/records/RItemLink.scala | 16 ++-- .../store/records/RNotificationChannel.scala | 20 ++-- .../records/RNotificationChannelGotify.scala | 16 +--- .../records/RNotificationChannelHttp.scala | 13 +-- .../records/RNotificationChannelMail.scala | 20 ++-- .../records/RNotificationChannelMatrix.scala | 18 ++-- .../store/records/RNotificationHook.scala | 18 ++-- .../store/records/ROrganization.scala | 20 ++-- .../docspell/store/records/RPerson.scala | 20 ++-- .../store/records/RQueryBookmark.scala | 65 ++++++------- .../scala/docspell/store/records/RShare.scala | 10 +- .../docspell/store/records/RSource.scala | 14 +-- .../scala/docspell/store/records/RTag.scala | 19 ++-- .../docspell/store/records/RTagItem.scala | 2 +- .../scala/docspell/store/records/RTotp.scala | 48 ++++++---- .../scala/docspell/store/records/RUser.scala | 66 +++++++------ .../docspell/store/records/RUserEmail.scala | 30 ++---- .../docspell/store/records/RUserImap.scala | 31 +++---- .../docspell/store/records/SourceData.scala | 4 +- .../docspell/store/fts/TempFtsOpsTest.scala | 45 ++++++--- .../generator/ItemQueryGeneratorTest.scala | 8 +- .../docspell/store/migrate/MigrateTest.scala | 2 - 65 files changed, 783 insertions(+), 635 deletions(-) create mode 100644 modules/common/src/main/scala/docspell/common/AccountInfo.scala create mode 100644 modules/common/src/main/scala/docspell/common/CollectiveId.scala diff --git a/modules/backend/src/main/scala/docspell/backend/auth/AuthToken.scala b/modules/backend/src/main/scala/docspell/backend/auth/AuthToken.scala index 45a34c9a..1c5a2c73 100644 --- a/modules/backend/src/main/scala/docspell/backend/auth/AuthToken.scala +++ b/modules/backend/src/main/scala/docspell/backend/auth/AuthToken.scala @@ -18,12 +18,13 @@ import scodec.bits.ByteVector case class AuthToken( nowMillis: Long, - account: AccountId, + account: 
AccountInfo, requireSecondFactor: Boolean, valid: Option[Duration], salt: String, sig: String ) { + def asString = valid match { case Some(v) => @@ -63,7 +64,7 @@ object AuthToken { for { millis <- TokenUtil.asInt(ms).toRight("Cannot read authenticator data") acc <- TokenUtil.b64dec(as).toRight("Cannot read authenticator data") - accId <- AccountId.parse(acc) + accId <- AccountInfo.parse(acc) twofac <- Right[String, Boolean](java.lang.Boolean.parseBoolean(fa)) valid <- TokenUtil .asInt(vs) @@ -75,7 +76,7 @@ object AuthToken { for { millis <- TokenUtil.asInt(ms).toRight("Cannot read authenticator data") acc <- TokenUtil.b64dec(as).toRight("Cannot read authenticator data") - accId <- AccountId.parse(acc) + accId <- AccountInfo.parse(acc) twofac <- Right[String, Boolean](java.lang.Boolean.parseBoolean(fa)) } yield AuthToken(millis, accId, twofac, None, salt, sig) @@ -84,7 +85,7 @@ object AuthToken { } def user[F[_]: Sync]( - accountId: AccountId, + accountId: AccountInfo, requireSecondFactor: Boolean, key: ByteVector, valid: Option[Duration] diff --git a/modules/backend/src/main/scala/docspell/backend/auth/Login.scala b/modules/backend/src/main/scala/docspell/backend/auth/Login.scala index 7d986cc7..0b36f733 100644 --- a/modules/backend/src/main/scala/docspell/backend/auth/Login.scala +++ b/modules/backend/src/main/scala/docspell/backend/auth/Login.scala @@ -96,10 +96,12 @@ object Login { for { data <- store.transact(QLogin.findUser(accountId)) _ <- logF.trace(s"Account lookup: $data") - res <- - if (data.exists(checkNoPassword(_, Set(AccountSource.OpenId)))) - doLogin(config, accountId, false) - else Result.invalidAuth.pure[F] + res <- data match { + case Some(d) if checkNoPassword(d, Set(AccountSource.OpenId)) => + doLogin(config, d.account, false) + case _ => + Result.invalidAuth.pure[F] + } } yield res def loginSession(config: Config)(sessionKey: String): F[Result] = @@ -122,9 +124,12 @@ object Login { for { data <- store.transact(QLogin.findUser(acc)) _ <- 
logF.trace(s"Account lookup: $data") - res <- - if (data.exists(check(up.pass))) doLogin(config, acc, up.rememberMe) - else Result.invalidAuth.pure[F] + res <- data match { + case Some(d) if check(up.pass)(d) => + doLogin(config, d.account, up.rememberMe) + case _ => + Result.invalidAuth.pure[F] + } } yield res case Left(_) => logF.info(s"User authentication failed for: ${up.hidePass}") *> @@ -162,7 +167,7 @@ object Login { (for { _ <- validateToken key <- EitherT.fromOptionF( - store.transact(RTotp.findEnabledByLogin(sf.token.account, true)), + store.transact(RTotp.findEnabledByLogin(sf.token.account.userId, true)), Result.invalidAuth ) now <- EitherT.right[Result](Timestamp.current[F]) @@ -175,13 +180,13 @@ object Login { } def loginRememberMe(config: Config)(token: String): F[Result] = { - def okResult(acc: AccountId) = + def okResult(acc: AccountInfo) = for { _ <- store.transact(RUser.updateLogin(acc)) token <- AuthToken.user(acc, false, config.serverSecret, None) } yield Result.ok(token, None) - def doLogin(rid: Ident) = + def rememberedLogin(rid: Ident) = (for { now <- OptionT.liftF(Timestamp.current[F]) minTime = now - config.rememberMe.valid @@ -214,7 +219,7 @@ object Login { else if (rt.isExpired(config.rememberMe.valid)) logF.info(s"RememberMe cookie expired ($rt).") *> Result.invalidTime .pure[F] - else doLogin(rt.rememberId) + else rememberedLogin(rt.rememberId) case Left(err) => logF.info(s"RememberMe cookie was invalid: $err") *> Result.invalidAuth .pure[F] @@ -245,11 +250,11 @@ object Login { private def doLogin( config: Config, - acc: AccountId, + acc: AccountInfo, rememberMe: Boolean ): F[Result] = for { - require2FA <- store.transact(RTotp.isEnabled(acc)) + require2FA <- store.transact(RTotp.isEnabled(acc.userId)) _ <- if (require2FA) ().pure[F] else store.transact(RUser.updateLogin(acc)) @@ -263,13 +268,11 @@ object Login { private def insertRememberToken( store: Store[F], - acc: AccountId, + acc: AccountInfo, config: Config ): F[RememberToken] = 
for { - uid <- OptionT(store.transact(RUser.findIdByAccount(acc))) - .getOrRaise(new IllegalStateException(s"No user_id found for account: $acc")) - rme <- RRememberMe.generate[F](uid) + rme <- RRememberMe.generate[F](acc.userId) _ <- store.transact(RRememberMe.insert(rme)) token <- RememberToken.user(rme.id, config.serverSecret) } yield token diff --git a/modules/backend/src/main/scala/docspell/backend/ops/OQueryBookmarks.scala b/modules/backend/src/main/scala/docspell/backend/ops/OQueryBookmarks.scala index f1ef2396..a22a33e3 100644 --- a/modules/backend/src/main/scala/docspell/backend/ops/OQueryBookmarks.scala +++ b/modules/backend/src/main/scala/docspell/backend/ops/OQueryBookmarks.scala @@ -6,7 +6,6 @@ package docspell.backend.ops -import cats.data.OptionT import cats.effect._ import cats.implicits._ @@ -19,19 +18,19 @@ import docspell.store.records._ trait OQueryBookmarks[F[_]] { - def getAll(account: AccountId): F[Vector[OQueryBookmarks.Bookmark]] + def getAll(account: AccountInfo): F[Vector[OQueryBookmarks.Bookmark]] - def findOne(account: AccountId, nameOrId: String): F[Option[OQueryBookmarks.Bookmark]] + def findOne(account: AccountInfo, nameOrId: String): F[Option[OQueryBookmarks.Bookmark]] - def create(account: AccountId, bookmark: OQueryBookmarks.NewBookmark): F[AddResult] + def create(account: AccountInfo, bookmark: OQueryBookmarks.NewBookmark): F[AddResult] def update( - account: AccountId, + account: AccountInfo, id: Ident, bookmark: OQueryBookmarks.NewBookmark ): F[UpdateResult] - def delete(account: AccountId, bookmark: Ident): F[Unit] + def delete(account: AccountInfo, bookmark: Ident): F[Unit] } object OQueryBookmarks { @@ -53,39 +52,43 @@ object OQueryBookmarks { def apply[F[_]: Sync](store: Store[F]): Resource[F, OQueryBookmarks[F]] = Resource.pure(new OQueryBookmarks[F] { - def getAll(account: AccountId): F[Vector[Bookmark]] = + def getAll(account: AccountInfo): F[Vector[Bookmark]] = store - .transact(RQueryBookmark.allForUser(account)) + 
.transact(RQueryBookmark.allForUser(account.collectiveId, account.userId)) .map(_.map(convert.toModel)) def findOne( - account: AccountId, + account: AccountInfo, nameOrId: String ): F[Option[OQueryBookmarks.Bookmark]] = store - .transact(RQueryBookmark.findByNameOrId(account, nameOrId)) + .transact( + RQueryBookmark.findByNameOrId(account.collectiveId, account.userId, nameOrId) + ) .map(_.map(convert.toModel)) - def create(account: AccountId, b: NewBookmark): F[AddResult] = { + def create(account: AccountInfo, b: NewBookmark): F[AddResult] = { + val uid = if (b.personal) account.userId.some else None val record = - RQueryBookmark.createNew(account, b.name, b.label, b.query, b.personal) - store.transact(RQueryBookmark.insertIfNotExists(account, record)) + RQueryBookmark.createNew( + account.collectiveId, + uid, + b.name, + b.label, + b.query + ) + store.transact( + RQueryBookmark.insertIfNotExists(account.collectiveId, account.userId, record) + ) } - def update(account: AccountId, id: Ident, b: NewBookmark): F[UpdateResult] = + def update(acc: AccountInfo, id: Ident, b: NewBookmark): F[UpdateResult] = UpdateResult.fromUpdate( - store.transact { - (for { - userId <- OptionT(RUser.findIdByAccount(account)) - n <- OptionT.liftF( - RQueryBookmark.update(convert.toRecord(account, id, userId, b)) - ) - } yield n).getOrElse(0) - } + store.transact(RQueryBookmark.update(convert.toRecord(acc, id, b))) ) - def delete(account: AccountId, bookmark: Ident): F[Unit] = - store.transact(RQueryBookmark.deleteById(account.collective, bookmark)).as(()) + def delete(account: AccountInfo, bookmark: Ident): F[Unit] = + store.transact(RQueryBookmark.deleteById(account.collectiveId, bookmark)).as(()) }) private object convert { @@ -94,17 +97,16 @@ object OQueryBookmarks { Bookmark(r.id, r.name, r.label, r.query, r.isPersonal, r.created) def toRecord( - account: AccountId, + account: AccountInfo, id: Ident, - userId: Ident, b: NewBookmark ): RQueryBookmark = RQueryBookmark( id, b.name, 
b.label, - if (b.personal) userId.some else None, - account.collective, + if (b.personal) account.userId.some else None, + account.collectiveId, b.query, Timestamp.Epoch ) diff --git a/modules/backend/src/main/scala/docspell/backend/ops/OTotp.scala b/modules/backend/src/main/scala/docspell/backend/ops/OTotp.scala index 67ffcdf1..ea11ca6e 100644 --- a/modules/backend/src/main/scala/docspell/backend/ops/OTotp.scala +++ b/modules/backend/src/main/scala/docspell/backend/ops/OTotp.scala @@ -6,29 +6,29 @@ package docspell.backend.ops +import cats.data.OptionT import cats.effect._ import cats.implicits._ - import docspell.backend.ops.OTotp.{ConfirmResult, InitResult, OtpState} import docspell.common._ -import docspell.store.records.{RTotp, RUser} +import docspell.store.records.RTotp import docspell.store.{AddResult, Store, UpdateResult} import docspell.totp.{Key, OnetimePassword, Totp} trait OTotp[F[_]] { /** Return whether TOTP is enabled for this account or not. */ - def state(accountId: AccountId): F[OtpState] + def state(accountId: AccountInfo): F[OtpState] /** Initializes TOTP by generating a secret and storing it in the database. TOTP is * still disabled, it must be confirmed in order to be active. */ - def initialize(accountId: AccountId): F[InitResult] + def initialize(accountId: AccountInfo): F[InitResult] /** Confirms and finishes initialization. TOTP is active after this for the given * account. */ - def confirmInit(accountId: AccountId, otp: OnetimePassword): F[ConfirmResult] + def confirmInit(accountId: AccountInfo, otp: OnetimePassword): F[ConfirmResult] /** Disables TOTP and removes the shared secret. If a otp is specified, it must be * valid. 
@@ -57,7 +57,7 @@ object OTotp { sealed trait InitResult object InitResult { - final case class Success(accountId: AccountId, key: Key) extends InitResult { + final case class Success(accountId: AccountInfo, key: Key) extends InitResult { def authenticatorUrl(issuer: String): LenientUri = LenientUri.unsafe( s"otpauth://totp/$issuer:${accountId.asString}?secret=${key.data.toBase32}&issuer=$issuer" @@ -67,7 +67,7 @@ object OTotp { case object NotFound extends InitResult final case class Failed(ex: Throwable) extends InitResult - def success(accountId: AccountId, key: Key): InitResult = + def success(accountId: AccountInfo, key: Key): InitResult = Success(accountId, key) def alreadyExists: InitResult = AlreadyExists @@ -85,47 +85,41 @@ object OTotp { Resource.pure[F, OTotp[F]](new OTotp[F] { val log = docspell.logging.getLogger[F] - def initialize(accountId: AccountId): F[InitResult] = + def initialize(accountId: AccountInfo): F[InitResult] = for { _ <- log.info(s"Initializing TOTP for account ${accountId.asString}") - userId <- store.transact(RUser.findIdByAccount(accountId)) - result <- userId match { - case Some(uid) => - for { - record <- RTotp.generate[F](uid, totp.settings.mac) - un <- store.transact(RTotp.updateDisabled(record)) - an <- - if (un != 0) - AddResult.entityExists("Entity exists, but update was ok").pure[F] - else store.add(RTotp.insert(record), RTotp.existsByLogin(accountId)) - innerResult <- - if (un != 0) InitResult.success(accountId, record.secret).pure[F] - else - an match { - case AddResult.EntityExists(msg) => - log.warn( - s"A totp record already exists for account '${accountId.asString}': $msg!" 
- ) *> - InitResult.alreadyExists.pure[F] - case AddResult.Failure(ex) => - log.warn( - s"Failed to setup totp record for '${accountId.asString}': ${ex.getMessage}" - ) *> - InitResult.failed(ex).pure[F] - case AddResult.Success => - InitResult.success(accountId, record.secret).pure[F] - } - } yield innerResult - case None => - log.warn(s"No user found for account: ${accountId.asString}!") *> - InitResult.NotFound.pure[F] - } + result <- for { + record <- RTotp.generate[F](accountId.userId, totp.settings.mac) + un <- store.transact(RTotp.updateDisabled(record)) + an <- + if (un != 0) + AddResult.entityExists("Entity exists, but update was ok").pure[F] + else store.add(RTotp.insert(record), RTotp.existsByUserId(accountId.userId)) + innerResult <- + if (un != 0) InitResult.success(accountId, record.secret).pure[F] + else + an match { + case AddResult.EntityExists(msg) => + log.warn( + s"A totp record already exists for account '${accountId.asString}': $msg!" + ) *> + InitResult.alreadyExists.pure[F] + case AddResult.Failure(ex) => + log.warn( + s"Failed to setup totp record for '${accountId.asString}': ${ex.getMessage}" + ) *> + InitResult.failed(ex).pure[F] + case AddResult.Success => + InitResult.success(accountId, record.secret).pure[F] + } + } yield innerResult + } yield result - def confirmInit(accountId: AccountId, otp: OnetimePassword): F[ConfirmResult] = + def confirmInit(accountId: AccountInfo, otp: OnetimePassword): F[ConfirmResult] = for { _ <- log.info(s"Confirm TOTP setup for account ${accountId.asString}") - key <- store.transact(RTotp.findEnabledByLogin(accountId, false)) + key <- store.transact(RTotp.findEnabledByUserId(accountId.userId, false)) now <- Timestamp.current[F] res <- key match { case None => @@ -134,7 +128,7 @@ object OTotp { val check = totp.checkPassword(r.secret, otp, now.value) if (check) store - .transact(RTotp.setEnabled(accountId, true)) + .transact(RTotp.setEnabled(accountId.userId, true)) .map(_ => ConfirmResult.Success) else 
ConfirmResult.Failed.pure[F] } @@ -154,7 +148,7 @@ object OTotp { val check = totp.checkPassword(r.secret, pw, now.value) if (check) UpdateResult.fromUpdate( - store.transact(RTotp.setEnabled(accountId, false)) + store.transact(RTotp.setEnabled(r.userId, false)) ) else log.info(s"TOTP code was invalid. Not disabling it.") *> UpdateResult @@ -163,12 +157,17 @@ object OTotp { } } yield res case None => - UpdateResult.fromUpdate(store.transact(RTotp.setEnabled(accountId, false))) + UpdateResult.fromUpdate { + (for { + key <- OptionT(RTotp.findEnabledByLogin(accountId, true)) + n <- OptionT.liftF(RTotp.setEnabled(key.userId, false)) + } yield n).mapK(store.transform).getOrElse(0) + } } - def state(accountId: AccountId): F[OtpState] = + def state(acc: AccountInfo): F[OtpState] = for { - record <- store.transact(RTotp.findEnabledByLogin(accountId, true)) + record <- store.transact(RTotp.findEnabledByUserId(acc.userId, true)) result = record match { case Some(r) => OtpState.Enabled(r.created) diff --git a/modules/common/src/main/scala/docspell/common/AccountInfo.scala b/modules/common/src/main/scala/docspell/common/AccountInfo.scala new file mode 100644 index 00000000..de02a6a5 --- /dev/null +++ b/modules/common/src/main/scala/docspell/common/AccountInfo.scala @@ -0,0 +1,48 @@ +/* + * Copyright 2020 Eike K. 
& Contributors + * + * SPDX-License-Identifier: AGPL-3.0-or-later + */ + +package docspell.common + +import io.circe.generic.semiauto.{deriveDecoder, deriveEncoder} +import io.circe.{Decoder, Encoder} + +final case class AccountInfo( + collectiveId: CollectiveId, + collective: Ident, + userId: Ident, + login: Ident +) { + + def asAccountId: AccountId = + AccountId(collective, login) + + def asString: String = + s"${collectiveId.value}/${collective.id}/${userId.id}/${login.id}" +} + +object AccountInfo { + + implicit val jsonDecoder: Decoder[AccountInfo] = deriveDecoder + implicit val jsonEncoder: Encoder[AccountInfo] = deriveEncoder + + def parse(str: String): Either[String, AccountInfo] = { + val input = str.replaceAll("\\s+", "").trim + val invalid: Either[String, AccountInfo] = + Left(s"Cannot parse account info: $str") + + input.split('/').toList match { + case collId :: collName :: userId :: login :: Nil => + for { + cid <- collId.toLongOption.toRight(s"Invalid collective id: $collId") + cn <- Ident.fromString(collName) + uid <- Ident.fromString(userId) + un <- Ident.fromString(login) + } yield AccountInfo(CollectiveId(cid), cn, uid, un) + case _ => + invalid + } + } +} diff --git a/modules/common/src/main/scala/docspell/common/CollectiveId.scala b/modules/common/src/main/scala/docspell/common/CollectiveId.scala new file mode 100644 index 00000000..4a8fdfe4 --- /dev/null +++ b/modules/common/src/main/scala/docspell/common/CollectiveId.scala @@ -0,0 +1,26 @@ +/* + * Copyright 2020 Eike K. 
& Contributors + * + * SPDX-License-Identifier: AGPL-3.0-or-later + */ + +package docspell.common + +import io.circe.{Decoder, Encoder} + +final class CollectiveId(val value: Long) extends AnyVal { + + override def toString = + s"CollectiveId($value)" +} + +object CollectiveId { + val unknown: CollectiveId = CollectiveId(-1) + + def apply(n: Long): CollectiveId = new CollectiveId(n) + + implicit val jsonEncoder: Encoder[CollectiveId] = + Encoder.encodeLong.contramap(_.value) + implicit val jsonDecoder: Decoder[CollectiveId] = + Decoder.decodeLong.map(CollectiveId.apply) +} diff --git a/modules/notification/api/src/main/scala/docspell/notification/api/Event.scala b/modules/notification/api/src/main/scala/docspell/notification/api/Event.scala index f5aaee15..47519dfc 100644 --- a/modules/notification/api/src/main/scala/docspell/notification/api/Event.scala +++ b/modules/notification/api/src/main/scala/docspell/notification/api/Event.scala @@ -21,7 +21,7 @@ sealed trait Event { def eventType: EventType /** The user who caused it. */ - def account: AccountId + def account: AccountInfo /** The base url for generating links. This is dynamic. */ def baseUrl: Option[LenientUri] @@ -62,7 +62,7 @@ object Event { /** Event triggered when tags of one or more items have changed */ final case class TagsChanged( - account: AccountId, + account: AccountInfo, items: Nel[Ident], added: List[String], removed: List[String], @@ -75,11 +75,11 @@ object Event { items: Nel[Ident], added: List[String], removed: List[String] - ): (AccountId, Option[LenientUri]) => TagsChanged = + ): (AccountInfo, Option[LenientUri]) => TagsChanged = (acc, url) => TagsChanged(acc, items, added, removed, url) def sample[F[_]: Sync]( - account: AccountId, + account: AccountInfo, baseUrl: Option[LenientUri] ): F[TagsChanged] = for { @@ -91,7 +91,7 @@ object Event { /** Event triggered when a custom field on an item changes. 
*/ final case class SetFieldValue( - account: AccountId, + account: AccountInfo, items: Nel[Ident], field: Ident, value: String, @@ -104,11 +104,11 @@ object Event { items: Nel[Ident], field: Ident, value: String - ): (AccountId, Option[LenientUri]) => SetFieldValue = + ): (AccountInfo, Option[LenientUri]) => SetFieldValue = (acc, url) => SetFieldValue(acc, items, field, value, url) def sample[F[_]: Sync]( - account: AccountId, + account: AccountInfo, baseUrl: Option[LenientUri] ): F[SetFieldValue] = for { @@ -118,7 +118,7 @@ object Event { } final case class DeleteFieldValue( - account: AccountId, + account: AccountInfo, items: Nel[Ident], field: Ident, baseUrl: Option[LenientUri] @@ -129,11 +129,11 @@ object Event { def partial( items: Nel[Ident], field: Ident - ): (AccountId, Option[LenientUri]) => DeleteFieldValue = + ): (AccountInfo, Option[LenientUri]) => DeleteFieldValue = (acc, url) => DeleteFieldValue(acc, items, field, url) def sample[F[_]: Sync]( - account: AccountId, + account: AccountInfo, baseUrl: Option[LenientUri] ): F[DeleteFieldValue] = for { @@ -147,7 +147,7 @@ object Event { * search results. 
*/ final case class ItemSelection( - account: AccountId, + account: AccountInfo, items: Nel[Ident], more: Boolean, baseUrl: Option[LenientUri], @@ -158,7 +158,7 @@ object Event { case object ItemSelection extends EventType { def sample[F[_]: Sync]( - account: AccountId, + account: AccountInfo, baseUrl: Option[LenientUri] ): F[ItemSelection] = for { @@ -169,6 +169,7 @@ object Event { /** Event when a new job is added to the queue */ final case class JobSubmitted( + account: AccountInfo, jobId: Ident, group: Ident, task: Ident, @@ -179,26 +180,27 @@ object Event { ) extends Event { val eventType = JobSubmitted val baseUrl = None - def account: AccountId = AccountId(group, submitter) } case object JobSubmitted extends EventType { - def sample[F[_]: Sync](account: AccountId): F[JobSubmitted] = + def sample[F[_]: Sync](account: AccountInfo): F[JobSubmitted] = for { id <- Ident.randomId[F] ev = JobSubmitted( + account, id, account.collective, Ident.unsafe("process-something-task"), "", JobState.running, "Process 3 files", - account.user + account.login ) } yield ev } /** Event when a job is finished (in final state). 
*/ final case class JobDone( + account: AccountInfo, jobId: Ident, group: Ident, task: Ident, @@ -211,20 +213,20 @@ object Event { ) extends Event { val eventType = JobDone val baseUrl = None - def account: AccountId = AccountId(group, submitter) } case object JobDone extends EventType { - def sample[F[_]: Sync](account: AccountId): F[JobDone] = + def sample[F[_]: Sync](account: AccountInfo): F[JobDone] = for { id <- Ident.randomId[F] ev = JobDone( + account, id, account.collective, Ident.unsafe("process-something-task"), "", JobState.running, "Process 3 files", - account.user, + account.login, Json.Null, None ) @@ -233,7 +235,7 @@ object Event { def sample[F[_]: Sync]( evt: EventType, - account: AccountId, + account: AccountInfo, baseUrl: Option[LenientUri] ): F[Event] = evt match { diff --git a/modules/notification/api/src/main/scala/docspell/notification/api/EventContext.scala b/modules/notification/api/src/main/scala/docspell/notification/api/EventContext.scala index e1bd8d27..c51497ad 100644 --- a/modules/notification/api/src/main/scala/docspell/notification/api/EventContext.scala +++ b/modules/notification/api/src/main/scala/docspell/notification/api/EventContext.scala @@ -25,7 +25,7 @@ trait EventContext { "eventType" -> event.eventType.asJson, "account" -> Json.obj( "collective" -> event.account.collective.asJson, - "user" -> event.account.user.asJson, + "user" -> event.account.login.asJson, "login" -> event.account.asJson ), "content" -> content diff --git a/modules/restserver/src/main/scala/docspell/restserver/routes/TotpRoutes.scala b/modules/restserver/src/main/scala/docspell/restserver/routes/TotpRoutes.scala index 9b1838eb..1199eb06 100644 --- a/modules/restserver/src/main/scala/docspell/restserver/routes/TotpRoutes.scala +++ b/modules/restserver/src/main/scala/docspell/restserver/routes/TotpRoutes.scala @@ -72,7 +72,7 @@ object TotpRoutes { for { data <- req.as[OtpConfirm] result <- backend.totp.disable( - user.account, + user.account.asAccountId, 
OnetimePassword(data.otp.pass).some ) resp <- Ok(Conversions.basicResult(result, "TOTP setup disabled.")) diff --git a/modules/store/src/main/scala/db/migration/MigrationTasks.scala b/modules/store/src/main/scala/db/migration/MigrationTasks.scala index 39f53f58..7cda04d2 100644 --- a/modules/store/src/main/scala/db/migration/MigrationTasks.scala +++ b/modules/store/src/main/scala/db/migration/MigrationTasks.scala @@ -13,6 +13,7 @@ import cats.implicits._ import docspell.common._ import docspell.common.syntax.StringSyntax._ import docspell.notification.api._ +import docspell.store.queries.QLogin import docspell.store.records._ import db.migration.data._ @@ -122,7 +123,8 @@ trait MigrationTasks { private def saveChannel(ch: Channel, account: AccountId): ConnectionIO[ChannelRef] = (for { newId <- OptionT.liftF(Ident.randomId[ConnectionIO]) - userId <- OptionT(RUser.findIdByAccount(account)) + userData <- OptionT(QLogin.findUser(account)) + userId = userData.account.userId r <- RNotificationChannel.fromChannel(ch, newId, userId) _ <- OptionT.liftF(RNotificationChannel.insert(r)) _ <- OptionT.liftF( @@ -172,7 +174,8 @@ trait MigrationTasks { } for { - userId <- OptionT(RUser.findIdByAccount(old.account)) + userData <- OptionT(QLogin.findUser(old.account)) + userId = userData.account.userId id <- OptionT.liftF(Ident.randomId[ConnectionIO]) now <- OptionT.liftF(Timestamp.current[ConnectionIO]) chName = Some("migrate notify items") @@ -198,8 +201,7 @@ trait MigrationTasks { } def mkTransactor(ctx: Context): Transactor[IO] = { - val xa = Transactor.fromConnection[IO](ctx.getConnection()) + val xa = Transactor.fromConnection[IO](ctx.getConnection) Transactor.strategy.set(xa, Strategy.void) // transactions are handled by flyway } - } diff --git a/modules/store/src/main/scala/docspell/store/impl/DoobieMeta.scala b/modules/store/src/main/scala/docspell/store/impl/DoobieMeta.scala index 6e919330..06c6f913 100644 --- 
a/modules/store/src/main/scala/docspell/store/impl/DoobieMeta.scala +++ b/modules/store/src/main/scala/docspell/store/impl/DoobieMeta.scala @@ -42,6 +42,9 @@ trait DoobieMeta extends EmilDoobieMeta { e.apply(a).noSpaces ) + implicit val metaCollectiveId: Meta[CollectiveId] = + Meta[Long].timap(CollectiveId.apply)(_.value) + implicit val metaAddonTriggerType: Meta[AddonTriggerType] = Meta[String].timap(AddonTriggerType.unsafeFromString)(_.name) diff --git a/modules/store/src/main/scala/docspell/store/qb/generator/ItemQueryGenerator.scala b/modules/store/src/main/scala/docspell/store/qb/generator/ItemQueryGenerator.scala index f5e63394..d5aa6462 100644 --- a/modules/store/src/main/scala/docspell/store/qb/generator/ItemQueryGenerator.scala +++ b/modules/store/src/main/scala/docspell/store/qb/generator/ItemQueryGenerator.scala @@ -24,12 +24,12 @@ import doobie.util.Put object ItemQueryGenerator { - def apply(today: LocalDate, tables: Tables, coll: Ident)(q: ItemQuery)(implicit + def apply(today: LocalDate, tables: Tables, coll: CollectiveId)(q: ItemQuery)(implicit PT: Put[Timestamp] ): Condition = fromExpr(today, tables, coll)(q.expr) - final def fromExpr(today: LocalDate, tables: Tables, coll: Ident)( + final def fromExpr(today: LocalDate, tables: Tables, coll: CollectiveId)( expr: Expr )(implicit PT: Put[Timestamp]): Condition = expr match { @@ -217,7 +217,7 @@ object ItemQueryGenerator { case Date.Local(date) => date case Date.Millis(ms) => - Instant.ofEpochMilli(ms).atZone(Timestamp.UTC).toLocalDate() + Instant.ofEpochMilli(ms).atZone(Timestamp.UTC).toLocalDate case Date.Today => today } @@ -285,7 +285,7 @@ object ItemQueryGenerator { private def itemsWithCustomField( sel: RCustomField.Table => Condition - )(coll: Ident, op: QOp, value: String): Select = { + )(coll: CollectiveId, op: QOp, value: String): Select = { val cf = RCustomField.as("cf") val cfv = RCustomFieldValue.as("cfv") diff --git a/modules/store/src/main/scala/docspell/store/queries/ItemData.scala 
b/modules/store/src/main/scala/docspell/store/queries/ItemData.scala index ad5028da..9c47ab84 100644 --- a/modules/store/src/main/scala/docspell/store/queries/ItemData.scala +++ b/modules/store/src/main/scala/docspell/store/queries/ItemData.scala @@ -24,6 +24,6 @@ case class ItemData( relatedItems: Vector[ListItem] ) { - def filterCollective(coll: Ident): Option[ItemData] = + def filterCollective(coll: CollectiveId): Option[ItemData] = if (item.cid == coll) Some(this) else None } diff --git a/modules/store/src/main/scala/docspell/store/queries/QAttachment.scala b/modules/store/src/main/scala/docspell/store/queries/QAttachment.scala index 6e90cc86..ebaae377 100644 --- a/modules/store/src/main/scala/docspell/store/queries/QAttachment.scala +++ b/modules/store/src/main/scala/docspell/store/queries/QAttachment.scala @@ -75,7 +75,7 @@ object QAttachment { */ def deleteSingleAttachment[F[_]: Sync]( store: Store[F] - )(attachId: Ident, coll: Ident): F[Int] = { + )(attachId: Ident, coll: CollectiveId): F[Int] = { val loadFiles = for { ra <- RAttachment.findByIdAndCollective(attachId, coll).map(_.map(_.fileId)) rs <- RAttachmentSource.findByIdAndCollective(attachId, coll).map(_.map(_.fileId)) @@ -138,7 +138,7 @@ object QAttachment { def deleteItemAttachments[F[_]: Sync]( store: Store[F] - )(itemId: Ident, coll: Ident): F[Int] = { + )(itemId: Ident, coll: CollectiveId): F[Int] = { val logger = docspell.logging.getLogger[F] for { ras <- store.transact(RAttachment.findByItemAndCollective(itemId, coll)) @@ -151,7 +151,10 @@ object QAttachment { } yield ns.sum } - def getMetaProposals(itemId: Ident, coll: Ident): ConnectionIO[MetaProposalList] = { + def getMetaProposals( + itemId: Ident, + coll: CollectiveId + ): ConnectionIO[MetaProposalList] = { val qa = Select( select(am.proposals), from(am) @@ -177,7 +180,7 @@ object QAttachment { def getAttachmentMeta( attachId: Ident, - collective: Ident + collective: CollectiveId ): ConnectionIO[Option[RAttachmentMeta]] = { val q = 
Select( select(am.all), @@ -204,14 +207,14 @@ object QAttachment { case class ContentAndName( id: Ident, item: Ident, - collective: Ident, + collective: CollectiveId, folder: Option[Ident], lang: Language, name: Option[String], content: Option[String] ) def allAttachmentMetaAndName( - coll: Option[Ident], + coll: Option[CollectiveId], itemIds: Option[Nel[Ident]], itemStates: Nel[ItemState], chunkSize: Int @@ -237,5 +240,4 @@ object QAttachment { ).build .query[ContentAndName] .streamWithChunkSize(chunkSize) - } diff --git a/modules/store/src/main/scala/docspell/store/queries/QCollective.scala b/modules/store/src/main/scala/docspell/store/queries/QCollective.scala index 84d9c8fe..fc2740c3 100644 --- a/modules/store/src/main/scala/docspell/store/queries/QCollective.scala +++ b/modules/store/src/main/scala/docspell/store/queries/QCollective.scala @@ -30,7 +30,7 @@ object QCollective { val empty = Names(Vector.empty, Vector.empty, Vector.empty) } - def allNames(collective: Ident, maxEntries: Int): ConnectionIO[Names] = { + def allNames(collective: CollectiveId, maxEntries: Int): ConnectionIO[Names] = { val created = Column[Timestamp]("created", TableDef("")) union( Select( @@ -70,7 +70,7 @@ object QCollective { tags: List[TagCount] ) - def getInsights(coll: Ident): ConnectionIO[InsightData] = { + def getInsights(coll: CollectiveId): ConnectionIO[InsightData] = { val q0 = Select( count(i.id).s, from(i), @@ -120,7 +120,7 @@ object QCollective { } yield InsightData(incoming, outgoing, deleted, size.getOrElse(0L), tags) } - def tagCloud(coll: Ident): ConnectionIO[List[TagCount]] = { + def tagCloud(coll: CollectiveId): ConnectionIO[List[TagCount]] = { val sql = Select( select(t.all).append(count(ti.itemId).s), @@ -132,7 +132,7 @@ object QCollective { } def getContacts( - coll: Ident, + coll: CollectiveId, query: Option[String], kind: Option[ContactKind] ): Stream[ConnectionIO, RContact] = { diff --git a/modules/store/src/main/scala/docspell/store/queries/QCustomField.scala 
b/modules/store/src/main/scala/docspell/store/queries/QCustomField.scala index b7a2230f..6b6a3042 100644 --- a/modules/store/src/main/scala/docspell/store/queries/QCustomField.scala +++ b/modules/store/src/main/scala/docspell/store/queries/QCustomField.scala @@ -23,17 +23,20 @@ object QCustomField { final case class CustomFieldData(field: RCustomField, usageCount: Int) def findAllLike( - coll: Ident, + coll: CollectiveId, nameQuery: Option[String], order: RCustomField.Table => Nel[OrderBy] ): ConnectionIO[Vector[CustomFieldData]] = findFragment(coll, nameQuery, None, order).build.query[CustomFieldData].to[Vector] - def findById(field: Ident, collective: Ident): ConnectionIO[Option[CustomFieldData]] = + def findById( + field: Ident, + collective: CollectiveId + ): ConnectionIO[Option[CustomFieldData]] = findFragment(collective, None, field.some).build.query[CustomFieldData].option private def findFragment( - coll: Ident, + coll: CollectiveId, nameQuery: Option[String], fieldId: Option[Ident], order: RCustomField.Table => Nel[OrderBy] = t => Nel.of(t.name.asc) @@ -69,5 +72,4 @@ object QCustomField { .query[FieldValue] .to[List] } - } diff --git a/modules/store/src/main/scala/docspell/store/queries/QFolder.scala b/modules/store/src/main/scala/docspell/store/queries/QFolder.scala index e5f5a1e8..2f025709 100644 --- a/modules/store/src/main/scala/docspell/store/queries/QFolder.scala +++ b/modules/store/src/main/scala/docspell/store/queries/QFolder.scala @@ -54,7 +54,7 @@ object QFolder { def exists: FolderChangeResult = Exists } - def delete(id: Ident, account: AccountId): ConnectionIO[FolderChangeResult] = { + def delete(id: Ident, userId: Ident): ConnectionIO[FolderChangeResult] = { def tryDelete = for { _ <- RItem.removeFolder(id) @@ -64,10 +64,9 @@ object QFolder { } yield FolderChangeResult.success (for { - uid <- OptionT(findUserId(account)) folder <- OptionT(RFolder.findById(id)) res <- OptionT.liftF( - if (folder.owner == uid) tryDelete + if (folder.owner == 
userId) tryDelete else FolderChangeResult.forbidden.pure[ConnectionIO] ) } yield res).getOrElse(FolderChangeResult.notFound) @@ -75,7 +74,7 @@ object QFolder { def changeName( folder: Ident, - account: AccountId, + userId: Ident, name: String ): ConnectionIO[FolderChangeResult] = { def tryUpdate(ns: RFolder): ConnectionIO[FolderChangeResult] = @@ -87,10 +86,9 @@ object QFolder { } yield res (for { - uid <- OptionT(findUserId(account)) folder <- OptionT(RFolder.findById(folder)) res <- OptionT.liftF( - if (folder.owner == uid) tryUpdate(folder.copy(name = name)) + if (folder.owner == userId) tryUpdate(folder.copy(name = name)) else FolderChangeResult.forbidden.pure[ConnectionIO] ) } yield res).getOrElse(FolderChangeResult.notFound) @@ -98,7 +96,7 @@ object QFolder { def removeMember( folder: Ident, - account: AccountId, + userId: Ident, member: Ident ): ConnectionIO[FolderChangeResult] = { def tryRemove: ConnectionIO[FolderChangeResult] = @@ -110,10 +108,9 @@ object QFolder { } yield res (for { - uid <- OptionT(findUserId(account)) folder <- OptionT(RFolder.findById(folder)) res <- OptionT.liftF( - if (folder.owner == uid) tryRemove + if (folder.owner == userId) tryRemove else FolderChangeResult.forbidden.pure[ConnectionIO] ) } yield res).getOrElse(FolderChangeResult.notFound) @@ -121,7 +118,7 @@ object QFolder { def addMember( folder: Ident, - account: AccountId, + userId: Ident, member: Ident ): ConnectionIO[FolderChangeResult] = { def tryAdd: ConnectionIO[FolderChangeResult] = @@ -134,16 +131,19 @@ object QFolder { } yield res (for { - uid <- OptionT(findUserId(account)) folder <- OptionT(RFolder.findById(folder)) res <- OptionT.liftF( - if (folder.owner == uid) tryAdd + if (folder.owner == userId) tryAdd else FolderChangeResult.forbidden.pure[ConnectionIO] ) } yield res).getOrElse(FolderChangeResult.notFound) } - def findById(id: Ident, account: AccountId): ConnectionIO[Option[FolderDetail]] = { + def findById( + id: Ident, + collectiveId: CollectiveId, + 
userId: Ident + ): ConnectionIO[Option[FolderDetail]] = { val user = RUser.as("u") val member = RFolderMember.as("m") val folder = RFolder.as("s") @@ -153,12 +153,19 @@ object QFolder { from(member) .innerJoin(user, member.user === user.uid) .innerJoin(folder, member.folder === folder.id), - member.folder === id && folder.collective === account.collective + member.folder === id && folder.collective === collectiveId ).query[IdRef].to[Vector] (for { folder <- OptionT( - findAll(account, Some(id), None, None, (ft, _) => Nel.of(ft.name.asc)) + findAll( + collectiveId, + userId, + Some(id), + None, + None, + (ft, _) => Nel.of(ft.name.asc) + ) .map(_.headOption) ) memb <- OptionT.liftF(memberQ) @@ -166,7 +173,8 @@ object QFolder { } def findAll( - account: AccountId, + collectiveId: CollectiveId, + userId: Ident, idQ: Option[Ident], ownerLogin: Option[Ident], nameQ: Option[String], @@ -199,22 +207,20 @@ object QFolder { val folder = RFolder.as("s") val memlogin = TableDef("memberlogin") val mlFolder = Column[Ident]("folder", memlogin) - val mlLogin = Column[Ident]("login", memlogin) + val mlUser = Column[Ident]("user_id", memlogin) withCte( memlogin -> union( Select( - select(member.folder.as(mlFolder), user.login.as(mlLogin)), + select(member.folder.as(mlFolder), member.user.as(mlUser)), from(member) - .innerJoin(user, user.uid === member.user) .innerJoin(folder, folder.id === member.folder), - folder.collective === account.collective + folder.collective === collectiveId ), Select( - select(folder.id.as(mlFolder), user.login.as(mlLogin)), - from(folder) - .innerJoin(user, user.uid === folder.owner), - folder.collective === account.collective + select(folder.id.as(mlFolder), folder.owner.as(mlUser)), + from(folder), + folder.collective === collectiveId ) ) )( @@ -228,7 +234,7 @@ object QFolder { Select( select(countAll > 0), from(memlogin), - mlFolder === folder.id && mlLogin === account.user + mlFolder === folder.id && mlUser === userId ).as("member"), Select( 
select(countAll - 1), @@ -239,7 +245,7 @@ object QFolder { from(folder) .innerJoin(user, user.uid === folder.owner), where( - folder.collective === account.collective &&? + folder.collective === collectiveId &&? idQ.map(id => folder.id === id) &&? nameQ.map(q => folder.name.like(s"%${q.toLowerCase}%")) &&? ownerLogin.map(login => user.login === login) @@ -249,7 +255,7 @@ object QFolder { } /** Select all folder_id where the given account is member or owner. */ - def findMemberFolderIds(account: AccountId): Select = { + def findMemberFolderIds(cid: CollectiveId, userId: Ident): Select = { val user = RUser.as("u") val f = RFolder.as("f") val m = RFolderMember.as("m") @@ -257,21 +263,21 @@ object QFolder { Select( select(f.id), from(f).innerJoin(user, f.owner === user.uid), - f.collective === account.collective && user.login === account.user + f.collective === cid && user.uid === userId ), Select( select(m.folder), from(m) .innerJoin(f, f.id === m.folder) .innerJoin(user, user.uid === m.user), - f.collective === account.collective && user.login === account.user + f.collective === cid && user.uid === userId ) ) } - def getMemberFolders(account: AccountId): ConnectionIO[Set[Ident]] = - findMemberFolderIds(account).build.query[Ident].to[Set] - - private def findUserId(account: AccountId): ConnectionIO[Option[Ident]] = - RUser.findByAccount(account).map(_.map(_.uid)) + def getMemberFolders( + collectiveId: CollectiveId, + userId: Ident + ): ConnectionIO[Set[Ident]] = + findMemberFolderIds(collectiveId, userId).build.query[Ident].to[Set] } diff --git a/modules/store/src/main/scala/docspell/store/queries/QItem.scala b/modules/store/src/main/scala/docspell/store/queries/QItem.scala index 172d38a9..5234940a 100644 --- a/modules/store/src/main/scala/docspell/store/queries/QItem.scala +++ b/modules/store/src/main/scala/docspell/store/queries/QItem.scala @@ -64,7 +64,7 @@ object QItem extends FtsSupport { val cteFts = ftsTable.map(cteTable) val sql = findItemsBase(q.fix, today, 
maxNoteLen, cteFts) - .changeWhere(c => c && queryCondition(today, q.fix.account.collective, q.cond)) + .changeWhere(c => c && queryCondition(today, q.fix.account.collectiveId, q.cond)) .joinFtsDetails(i, ftsTable) .limit(batch) .build @@ -73,7 +73,7 @@ object QItem extends FtsSupport { sql.query[ListItem].stream } - def findItem(id: Ident, collective: Ident): ConnectionIO[Option[ItemData]] = { + def findItem(id: Ident, collective: CollectiveId): ConnectionIO[Option[ItemData]] = { val cq = Select( select(i.all, org.all, pers0.all, pers1.all, equip.all) @@ -121,7 +121,10 @@ object QItem extends FtsSupport { ) } - def findRelatedItems(id: Ident, collective: Ident): ConnectionIO[Vector[ListItem]] = + def findRelatedItems( + id: Ident, + collective: CollectiveId + ): ConnectionIO[Vector[ListItem]] = RItemLink .findLinked(collective, id) .map(v => Nel.fromList(v.toList)) @@ -131,7 +134,8 @@ object QItem extends FtsSupport { case Some(nel) => val expr = ItemQuery.Expr.and(ValidItemStates, ItemQueryDsl.Q.itemIdsIn(nel.map(_.id))) - val account = AccountId(collective, Ident.unsafe("")) + val account = + AccountInfo(collective, Ident.unsafe(""), Ident.unsafe(""), Ident.unsafe("")) findItemsBase( Query.Fix(account, Some(expr), None), @@ -159,7 +163,7 @@ object QItem extends FtsSupport { noteMaxLen: Int, ftsTable: Option[RFtsResult.Table] ): Select.Ordered = { - val coll = q.account.collective + val coll = q.account.collectiveId Select( select( @@ -197,7 +201,9 @@ object QItem extends FtsSupport { i.cid === coll &&? 
q.query.map(qs => queryCondFromExpr(today, coll, qs)) && or( i.folder.isNull, - i.folder.in(QFolder.findMemberFolderIds(q.account)) + i.folder.in( + QFolder.findMemberFolderIds(q.account.collectiveId, q.account.userId) + ) ) ) ).orderBy( @@ -223,7 +229,7 @@ object QItem extends FtsSupport { from.innerJoin(meta, meta.id === as.fileId) } ) - .changeWhere(c => c && queryCondition(today, q.fix.account.collective, q.cond)) + .changeWhere(c => c && queryCondition(today, q.fix.account.collectiveId, q.cond)) .limit(maxFiles) def findFiles( @@ -288,12 +294,20 @@ object QItem extends FtsSupport { .streamWithChunkSize(chunkSize) } - def queryCondFromExpr(today: LocalDate, coll: Ident, q: ItemQuery.Expr): Condition = { + def queryCondFromExpr( + today: LocalDate, + coll: CollectiveId, + q: ItemQuery.Expr + ): Condition = { val tables = Tables(i, org, pers0, pers1, equip, f, a, m, AttachCountTable("cta")) ItemQueryGenerator.fromExpr(today, tables, coll)(q) } - def queryCondition(today: LocalDate, coll: Ident, cond: Query.QueryCond): Condition = + def queryCondition( + today: LocalDate, + coll: CollectiveId, + cond: Query.QueryCond + ): Condition = cond match { case Query.QueryExpr(Some(expr)) => queryCondFromExpr(today, coll, expr) @@ -340,7 +354,7 @@ object QItem extends FtsSupport { .joinFtsIdOnly(i, ftsTable) .withSelect(select(tag.category).append(countDistinct(i.id).as("num"))) .changeFrom(_.prepend(tagFrom)) - .changeWhere(c => c && queryCondition(today, q.fix.account.collective, q.cond)) + .changeWhere(c => c && queryCondition(today, q.fix.account.collectiveId, q.cond)) .groupBy(tag.category) .build .query[CategoryCount] @@ -348,7 +362,7 @@ object QItem extends FtsSupport { for { existing <- catCloud - allCats <- RTag.listCategories(q.fix.account.collective) + allCats <- RTag.listCategories(q.fix.account.collectiveId) other = allCats.diff(existing.flatMap(_.category)) } yield existing ++ other.map(n => CategoryCount(n.some, 0)) } @@ -366,7 +380,7 @@ object QItem extends 
FtsSupport { .joinFtsIdOnly(i, ftsTable) .withSelect(select(tag.all).append(countDistinct(i.id).as("num"))) .changeFrom(_.prepend(tagFrom)) - .changeWhere(c => c && queryCondition(today, q.fix.account.collective, q.cond)) + .changeWhere(c => c && queryCondition(today, q.fix.account.collectiveId, q.cond)) .groupBy(tag.tid) .build .query[TagCount] @@ -376,7 +390,7 @@ object QItem extends FtsSupport { // are not included they are fetched separately for { existing <- tagCloud - other <- RTag.findOthers(q.fix.account.collective, existing.map(_.tag.tagId)) + other <- RTag.findOthers(q.fix.account.collectiveId, existing.map(_.tag.tagId)) } yield existing ++ other.map(TagCount(_, 0)) } @@ -386,7 +400,7 @@ object QItem extends FtsSupport { findItemsBase(q.fix, today, 0, None).unwrap .joinFtsIdOnly(i, ftsTable) .withSelect(Nel.of(count(i.id).as("num"))) - .changeWhere(c => c && queryCondition(today, q.fix.account.collective, q.cond)) + .changeWhere(c => c && queryCondition(today, q.fix.account.collectiveId, q.cond)) .build .query[Int] .unique @@ -422,7 +436,7 @@ object QItem extends FtsSupport { .joinFtsIdOnly(i, ftsTable) .withSelect(select(idCol, nameCol).append(count(idCol).as("num"))) .changeWhere(c => - c && fkCol.isNotNull && queryCondition(today, q.fix.account.collective, q.cond) + c && fkCol.isNotNull && queryCondition(today, q.fix.account.collectiveId, q.cond) ) .groupBy(idCol, nameCol) .build @@ -437,7 +451,7 @@ object QItem extends FtsSupport { .joinFtsIdOnly(i, ftsTable) .withSelect(select(f.id, f.name, f.owner, fu.login).append(count(i.id).as("num"))) .changeFrom(_.innerJoin(fu, fu.uid === f.owner)) - .changeWhere(c => c && queryCondition(today, q.fix.account.collective, q.cond)) + .changeWhere(c => c && queryCondition(today, q.fix.account.collectiveId, q.cond)) .groupBy(f.id, f.name, f.owner, fu.login) .build .query[FolderCount] @@ -455,7 +469,7 @@ object QItem extends FtsSupport { val base = findItemsBase(q.fix, today, 0, None).unwrap 
.changeFrom(_.prepend(fieldJoin)) - .changeWhere(c => c && queryCondition(today, q.fix.account.collective, q.cond)) + .changeWhere(c => c && queryCondition(today, q.fix.account.collectiveId, q.cond)) .ftsCondition(i, ftsTable) .groupBy(GroupBy(cf.all)) @@ -507,7 +521,7 @@ object QItem extends FtsSupport { * implemented by running an additional query per item. */ def findItemsWithTags( - collective: Ident, + collective: CollectiveId, search: Stream[ConnectionIO, ListItem] ): Stream[ConnectionIO, ListItemWithTags] = { def findTag( @@ -555,7 +569,9 @@ object QItem extends FtsSupport { a.itemId === item ).build.query[AttachmentLight].to[List] - def delete[F[_]: Sync](store: Store[F])(itemId: Ident, collective: Ident): F[Int] = + def delete[F[_]: Sync]( + store: Store[F] + )(itemId: Ident, collective: CollectiveId): F[Int] = for { rn <- QAttachment.deleteItemAttachments(store)(itemId, collective) tn <- store.transact(RTagItem.deleteItemTags(itemId)) @@ -607,7 +623,7 @@ object QItem extends FtsSupport { def findByChecksum( checksum: String, - collective: Ident, + collective: CollectiveId, excludeFileMeta: Set[FileKey] ): ConnectionIO[Vector[RItem]] = { val qq = findByChecksumQuery(checksum, collective, excludeFileMeta).build @@ -617,7 +633,7 @@ object QItem extends FtsSupport { def findByChecksumQuery( checksum: String, - collective: Ident, + collective: CollectiveId, excludeFileMeta: Set[FileKey] ): Select = { val m1 = RFileMeta.as("m1") @@ -657,7 +673,7 @@ object QItem extends FtsSupport { language: Language ) def allNameAndNotes( - coll: Option[Ident], + coll: Option[CollectiveId], itemIds: Option[Nel[Ident]], chunkSize: Int ): Stream[ConnectionIO, NameAndNotes] = { @@ -677,7 +693,7 @@ object QItem extends FtsSupport { } def findAllNewesFirst( - collective: Ident, + collective: CollectiveId, chunkSize: Int, limit: Batch ): Stream[ConnectionIO, Ident] = { @@ -691,7 +707,7 @@ object QItem extends FtsSupport { } def resolveTextAndTag( - collective: Ident, + collective: 
CollectiveId, itemId: Ident, tagCategory: String, maxLen: Int, @@ -724,7 +740,7 @@ object QItem extends FtsSupport { } def resolveTextAndCorrOrg( - collective: Ident, + collective: CollectiveId, itemId: Ident, maxLen: Int, pageSep: String @@ -741,7 +757,7 @@ object QItem extends FtsSupport { } def resolveTextAndCorrPerson( - collective: Ident, + collective: CollectiveId, itemId: Ident, maxLen: Int, pageSep: String @@ -758,7 +774,7 @@ object QItem extends FtsSupport { } def resolveTextAndConcPerson( - collective: Ident, + collective: CollectiveId, itemId: Ident, maxLen: Int, pageSep: String @@ -775,7 +791,7 @@ object QItem extends FtsSupport { } def resolveTextAndConcEquip( - collective: Ident, + collective: CollectiveId, itemId: Ident, maxLen: Int, pageSep: String @@ -797,12 +813,12 @@ object QItem extends FtsSupport { m.content.s } else substring(m.content.s, 0, maxLen).s - private def readTextAndTag(collective: Ident, itemId: Ident, pageSep: String)( + private def readTextAndTag(collective: CollectiveId, itemId: Ident, pageSep: String)( q: Select ): ConnectionIO[TextAndTag] = for { _ <- logger.trace( - s"query: $q (${itemId.id}, ${collective.id})" + s"query: $q (${itemId.id}, ${collective.value})" ) texts <- q.build.query[(String, Option[TextAndTag.TagName])].to[List] _ <- logger.trace( diff --git a/modules/store/src/main/scala/docspell/store/queries/QLogin.scala b/modules/store/src/main/scala/docspell/store/queries/QLogin.scala index 280ab131..c79c4a8d 100644 --- a/modules/store/src/main/scala/docspell/store/queries/QLogin.scala +++ b/modules/store/src/main/scala/docspell/store/queries/QLogin.scala @@ -21,7 +21,7 @@ object QLogin { private[this] val logger = docspell.logging.getLogger[ConnectionIO] case class Data( - account: AccountId, + account: AccountInfo, password: Password, collectiveState: CollectiveState, userState: UserState, @@ -35,7 +35,16 @@ object QLogin { val coll = RCollective.as("c") val sql = Select( - select(user.cid, user.login, user.password, 
coll.state, user.state, user.source), + select( + coll.id, + coll.name, + user.uid, + user.login, + user.password, + coll.state, + user.state, + user.source + ), from(user).innerJoin(coll, user.cid === coll.id), where(user, coll) ).build @@ -44,7 +53,7 @@ object QLogin { } def findUser(acc: AccountId): ConnectionIO[Option[Data]] = - findUser0((user, _) => user.login === acc.user && user.cid === acc.collective) + findUser0((user, coll) => user.login === acc.user && coll.name === acc.collective) def findUser(userId: Ident): ConnectionIO[Option[Data]] = findUser0((user, _) => user.uid === userId) diff --git a/modules/store/src/main/scala/docspell/store/queries/QMails.scala b/modules/store/src/main/scala/docspell/store/queries/QMails.scala index f0081492..43156254 100644 --- a/modules/store/src/main/scala/docspell/store/queries/QMails.scala +++ b/modules/store/src/main/scala/docspell/store/queries/QMails.scala @@ -23,21 +23,27 @@ object QMails { private val mailitem = RSentMailItem.as("mi") private val user = RUser.as("u") - def delete(coll: Ident, mailId: Ident): ConnectionIO[Int] = + def delete(coll: CollectiveId, mailId: Ident): ConnectionIO[Int] = (for { m <- OptionT(findMail(coll, mailId)) k <- OptionT.liftF(RSentMailItem.deleteMail(mailId)) n <- OptionT.liftF(RSentMail.delete(m._1.id)) } yield k + n).getOrElse(0) - def findMail(coll: Ident, mailId: Ident): ConnectionIO[Option[(RSentMail, Ident)]] = + def findMail( + coll: CollectiveId, + mailId: Ident + ): ConnectionIO[Option[(RSentMail, Ident)]] = partialFind .where(smail.id === mailId && item.cid === coll) .build .query[(RSentMail, Ident)] .option - def findMails(coll: Ident, itemId: Ident): ConnectionIO[Vector[(RSentMail, Ident)]] = + def findMails( + coll: CollectiveId, + itemId: Ident + ): ConnectionIO[Vector[(RSentMail, Ident)]] = partialFind .where(mailitem.itemId === itemId && item.cid === coll) .orderBy(smail.created.desc) @@ -53,5 +59,4 @@ object QMails { .innerJoin(item, mailitem.itemId === item.id) 
.innerJoin(user, user.uid === smail.uid) ) - } diff --git a/modules/store/src/main/scala/docspell/store/queries/QNotification.scala b/modules/store/src/main/scala/docspell/store/queries/QNotification.scala index e7f0175b..b03eb187 100644 --- a/modules/store/src/main/scala/docspell/store/queries/QNotification.scala +++ b/modules/store/src/main/scala/docspell/store/queries/QNotification.scala @@ -26,7 +26,7 @@ object QNotification { def findChannelsForEvent(event: Event): ConnectionIO[Vector[HookChannel]] = for { - hooks <- listHooks(event.account.collective, event.eventType) + hooks <- listHooks(event.account.collectiveId, event.eventType) chs <- hooks.traverse(h => listChannels(h.id) .flatMap(_.flatTraverse(hc => readHookChannel(h.uid, hc))) @@ -42,7 +42,7 @@ object QNotification { ) def listHooks( - collective: Ident, + collective: CollectiveId, eventType: EventType ): ConnectionIO[Vector[RNotificationHook]] = run( diff --git a/modules/store/src/main/scala/docspell/store/queries/QOrganization.scala b/modules/store/src/main/scala/docspell/store/queries/QOrganization.scala index 10c457a8..9647f46c 100644 --- a/modules/store/src/main/scala/docspell/store/queries/QOrganization.scala +++ b/modules/store/src/main/scala/docspell/store/queries/QOrganization.scala @@ -25,7 +25,7 @@ object QOrganization { private val org = ROrganization.as("o") def findOrgAndContact( - coll: Ident, + coll: CollectiveId, query: Option[String], order: ROrganization.Table => Nel[OrderBy] ): Stream[ConnectionIO, (ROrganization, Vector[RContact])] = { @@ -50,7 +50,7 @@ object QOrganization { } def getOrgAndContact( - coll: Ident, + coll: CollectiveId, orgId: Ident ): ConnectionIO[Option[(ROrganization, Vector[RContact])]] = { val sql = run( @@ -72,7 +72,7 @@ object QOrganization { } def findPersonAndContact( - coll: Ident, + coll: CollectiveId, query: Option[String], order: (RPerson.Table, ROrganization.Table) => Nel[OrderBy] ): Stream[ConnectionIO, (RPerson, Option[ROrganization], 
Vector[RContact])] = { @@ -99,7 +99,7 @@ object QOrganization { } def getPersonAndContact( - coll: Ident, + coll: CollectiveId, persId: Ident ): ConnectionIO[Option[(RPerson, Option[ROrganization], Vector[RContact])]] = { val sql = @@ -125,7 +125,7 @@ object QOrganization { } def findPersonByContact( - coll: Ident, + coll: CollectiveId, value: String, ck: Option[ContactKind], use: Option[Nel[PersonUse]] @@ -141,7 +141,7 @@ object QOrganization { def addOrg[F[_]]( org: ROrganization, contacts: Seq[RContact], - cid: Ident + cid: CollectiveId ): Store[F] => F[AddResult] = { val insert = for { n <- ROrganization.insert(org) @@ -156,7 +156,7 @@ object QOrganization { def addPerson[F[_]]( person: RPerson, contacts: Seq[RContact], - cid: Ident + cid: CollectiveId ): Store[F] => F[AddResult] = { val insert = for { n <- RPerson.insert(person) @@ -171,7 +171,7 @@ object QOrganization { def updateOrg[F[_]]( org: ROrganization, contacts: Seq[RContact], - cid: Ident + cid: CollectiveId ): Store[F] => F[AddResult] = { val insert = for { n <- ROrganization.update(org) @@ -187,7 +187,7 @@ object QOrganization { def updatePerson[F[_]]( person: RPerson, contacts: Seq[RContact], - cid: Ident + cid: CollectiveId ): Store[F] => F[AddResult] = { val insert = for { n <- RPerson.update(person) @@ -200,7 +200,7 @@ object QOrganization { store => store.add(insert, exists) } - def deleteOrg(orgId: Ident, collective: Ident): ConnectionIO[Int] = + def deleteOrg(orgId: Ident, collective: CollectiveId): ConnectionIO[Int] = for { n0 <- RItem.removeCorrOrg(collective, orgId) n1 <- RContact.deleteOrg(orgId) @@ -208,7 +208,7 @@ object QOrganization { n3 <- ROrganization.delete(orgId, collective) } yield n0 + n1 + n2 + n3 - def deletePerson(personId: Ident, collective: Ident): ConnectionIO[Int] = + def deletePerson(personId: Ident, collective: CollectiveId): ConnectionIO[Int] = for { n0 <- RItem.removeCorrPerson(collective, personId) n1 <- RItem.removeConcPerson(collective, personId) diff --git 
a/modules/store/src/main/scala/docspell/store/queries/QUser.scala b/modules/store/src/main/scala/docspell/store/queries/QUser.scala index a9670c46..f366eb51 100644 --- a/modules/store/src/main/scala/docspell/store/queries/QUser.scala +++ b/modules/store/src/main/scala/docspell/store/queries/QUser.scala @@ -24,40 +24,35 @@ object QUser { shares: Int ) - def getUserData(accountId: AccountId): ConnectionIO[UserData] = { + def getUserData(cid: CollectiveId, uid: Ident): ConnectionIO[UserData] = { val folder = RFolder.as("f") val mail = RSentMail.as("m") val mitem = RSentMailItem.as("mi") - val user = RUser.as("u") val share = RShare.as("s") for { - uid <- loadUserId(accountId).map(_.getOrElse(Ident.unsafe(""))) folders <- run( select(folder.name), from(folder), - folder.owner === uid && folder.collective === accountId.collective + folder.owner === uid && folder.collective === cid ).query[Ident].to[List] mails <- run( select(count(mail.id)), from(mail) - .innerJoin(mitem, mail.id === mitem.sentMailId) - .innerJoin(user, user.uid === mail.uid), - user.login === accountId.user && user.cid === accountId.collective + .innerJoin(mitem, mail.id === mitem.sentMailId), + mail.uid === uid ).query[Int].unique shares <- run( select(count(share.id)), - from(share) - .innerJoin(user, user.uid === share.userId), - user.login === accountId.user && user.cid === accountId.collective + from(share), + share.userId === uid ).query[Int].unique } yield UserData(folders, mails, shares) } - def deleteUserAndData(accountId: AccountId): ConnectionIO[Int] = + def deleteUserAndData(uid: Ident): ConnectionIO[Int] = for { - uid <- loadUserId(accountId).map(_.getOrElse(Ident.unsafe(""))) - _ <- logger.info(s"Remove user ${accountId.asString} (uid=${uid.id})") + _ <- logger.info(s"Remove user ${uid.id}") n1 <- deleteUserFolders(uid) @@ -125,8 +120,4 @@ object QUser { n2 <- DML.delete(imap, imap.uid === uid) } yield n1 + n2 } - - private def loadUserId(id: AccountId): ConnectionIO[Option[Ident]] = - 
RUser.findIdByAccount(id) - } diff --git a/modules/store/src/main/scala/docspell/store/queries/Query.scala b/modules/store/src/main/scala/docspell/store/queries/Query.scala index 98c86d23..074d654f 100644 --- a/modules/store/src/main/scala/docspell/store/queries/Query.scala +++ b/modules/store/src/main/scala/docspell/store/queries/Query.scala @@ -58,7 +58,7 @@ object Query { Query(fix, QueryExpr(None)) case class Fix( - account: AccountId, + account: AccountInfo, query: Option[ItemQuery.Expr], order: Option[OrderSelect => OrderBy] ) { @@ -87,7 +87,7 @@ object Query { QueryExpr(Some(q)) } - def all(account: AccountId): Query = + def all(account: AccountInfo): Query = Query(Fix(account, None, None), QueryExpr(None)) } diff --git a/modules/store/src/main/scala/docspell/store/records/AddonRunConfigData.scala b/modules/store/src/main/scala/docspell/store/records/AddonRunConfigData.scala index 69c6826a..39e4aff6 100644 --- a/modules/store/src/main/scala/docspell/store/records/AddonRunConfigData.scala +++ b/modules/store/src/main/scala/docspell/store/records/AddonRunConfigData.scala @@ -11,7 +11,7 @@ import cats.syntax.all._ import fs2.Stream import docspell.addons.AddonTriggerType -import docspell.common.{Ident, Timestamp} +import docspell.common.{CollectiveId, Ident, Timestamp} import docspell.store.qb.DSL._ import docspell.store.qb._ @@ -26,7 +26,7 @@ case class AddonRunConfigData( object AddonRunConfigData { def findAll( - cid: Ident, + cid: CollectiveId, enabled: Option[Boolean] = None, trigger: Set[AddonTriggerType] = Set.empty, configIds: Set[Ident] = Set.empty @@ -88,7 +88,7 @@ object AddonRunConfigData { } yield n1 + tts.sum + tas.sum def findEnabledRef( - cid: Ident, + cid: CollectiveId, taskId: Ident ): ConnectionIO[List[(RAddonArchive, RAddonRunConfigAddon)]] = { val run = RAddonRunConfig.as("run") @@ -108,7 +108,7 @@ object AddonRunConfigData { } def findEnabledRefs( - cid: Ident, + cid: CollectiveId, trigger: AddonTriggerType, addonTaskIds: Set[Ident] ): 
Stream[ConnectionIO, (RAddonRunConfig, List[(RAddonArchive, String)])] = { diff --git a/modules/store/src/main/scala/docspell/store/records/AddonRunConfigResolved.scala b/modules/store/src/main/scala/docspell/store/records/AddonRunConfigResolved.scala index 94ddb6bc..19eb474f 100644 --- a/modules/store/src/main/scala/docspell/store/records/AddonRunConfigResolved.scala +++ b/modules/store/src/main/scala/docspell/store/records/AddonRunConfigResolved.scala @@ -46,7 +46,7 @@ object AddonRunConfigResolved { def findById( configId: Ident, - collective: Ident, + collective: CollectiveId, enabled: Option[Boolean] ): ConnectionIO[Option[AddonRunConfigResolved]] = (for { @@ -56,7 +56,7 @@ object AddonRunConfigResolved { } yield AddonRunConfigResolved(cfg, refs, tri)).value def findAllForCollective( - cid: Ident, + cid: CollectiveId, enabled: Option[Boolean], trigger: Set[AddonTriggerType], configIds: Set[Ident] diff --git a/modules/store/src/main/scala/docspell/store/records/RAddonArchive.scala b/modules/store/src/main/scala/docspell/store/records/RAddonArchive.scala index 05e2fc15..3411743b 100644 --- a/modules/store/src/main/scala/docspell/store/records/RAddonArchive.scala +++ b/modules/store/src/main/scala/docspell/store/records/RAddonArchive.scala @@ -21,7 +21,7 @@ import io.circe.{Decoder, Encoder} final case class RAddonArchive( id: Ident, - cid: Ident, + cid: CollectiveId, fileId: FileKey, originalUrl: Option[LenientUri], name: String, @@ -32,7 +32,7 @@ final case class RAddonArchive( ) { def nameAndVersion: String = - s"${name}-${version}" + s"$name-$version" def isUnchanged(meta: AddonMeta): Boolean = name == meta.meta.name && @@ -60,7 +60,7 @@ object RAddonArchive { val tableName = "addon_archive" val id = Column[Ident]("id", this) - val cid = Column[Ident]("cid", this) + val cid = Column[CollectiveId]("coll_id", this) val fileId = Column[FileKey]("file_id", this) val originalUrl = Column[LenientUri]("original_url", this) val name = Column[String]("name", this) @@ 
-85,7 +85,7 @@ object RAddonArchive { def apply( id: Ident, - cid: Ident, + cid: CollectiveId, fileId: FileKey, originalUrl: Option[LenientUri], meta: AddonMeta, @@ -116,14 +116,14 @@ object RAddonArchive { else DML.insert(T, T.all, values) } - def existsByUrl(cid: Ident, url: LenientUri): ConnectionIO[Boolean] = + def existsByUrl(cid: CollectiveId, url: LenientUri): ConnectionIO[Boolean] = Select( select(count(T.id)), from(T), T.cid === cid && T.originalUrl === url ).build.query[Int].unique.map(_ > 0) - def findByUrl(cid: Ident, url: LenientUri): ConnectionIO[Option[RAddonArchive]] = + def findByUrl(cid: CollectiveId, url: LenientUri): ConnectionIO[Option[RAddonArchive]] = Select( select(T.all), from(T), @@ -131,7 +131,7 @@ object RAddonArchive { ).build.query[RAddonArchive].option def findByNameAndVersion( - cid: Ident, + cid: CollectiveId, name: String, version: String ): ConnectionIO[Option[RAddonArchive]] = @@ -141,14 +141,17 @@ object RAddonArchive { T.cid === cid && T.name === name && T.version === version ).build.query[RAddonArchive].option - def findById(cid: Ident, id: Ident): ConnectionIO[Option[RAddonArchive]] = + def findById(cid: CollectiveId, id: Ident): ConnectionIO[Option[RAddonArchive]] = Select( select(T.all), from(T), T.cid === cid && T.id === id ).build.query[RAddonArchive].option - def findByIds(cid: Ident, ids: NonEmptyList[Ident]): ConnectionIO[List[RAddonArchive]] = + def findByIds( + cid: CollectiveId, + ids: NonEmptyList[Ident] + ): ConnectionIO[List[RAddonArchive]] = Select( select(T.all), from(T), @@ -169,14 +172,14 @@ object RAddonArchive { ) ) - def listAll(cid: Ident): ConnectionIO[List[RAddonArchive]] = + def listAll(cid: CollectiveId): ConnectionIO[List[RAddonArchive]] = Select( select(T.all), from(T), T.cid === cid ).orderBy(T.name.asc).build.query[RAddonArchive].to[List] - def deleteById(cid: Ident, id: Ident): ConnectionIO[Int] = + def deleteById(cid: CollectiveId, id: Ident): ConnectionIO[Int] = DML.delete(T, T.cid === cid && 
T.id === id) implicit val jsonDecoder: Decoder[RAddonArchive] = deriveDecoder diff --git a/modules/store/src/main/scala/docspell/store/records/RAddonRunConfig.scala b/modules/store/src/main/scala/docspell/store/records/RAddonRunConfig.scala index 70460aaa..d6b04ef7 100644 --- a/modules/store/src/main/scala/docspell/store/records/RAddonRunConfig.scala +++ b/modules/store/src/main/scala/docspell/store/records/RAddonRunConfig.scala @@ -18,7 +18,7 @@ import doobie.implicits._ final case class RAddonRunConfig( id: Ident, - cid: Ident, + cid: CollectiveId, userId: Option[Ident], name: String, enabled: Boolean, @@ -30,7 +30,7 @@ object RAddonRunConfig { val tableName = "addon_run_config" val id = Column[Ident]("id", this) - val cid = Column[Ident]("cid", this) + val cid = Column[CollectiveId]("coll_id", this) val userId = Column[Ident]("user_id", this) val name = Column[String]("name", this) val enabled = Column[Boolean]("enabled", this) @@ -61,13 +61,13 @@ object RAddonRunConfig { ) ) - def findById(cid: Ident, id: Ident): ConnectionIO[Option[RAddonRunConfig]] = + def findById(cid: CollectiveId, id: Ident): ConnectionIO[Option[RAddonRunConfig]] = Select(select(T.all), from(T), T.cid === cid && T.id === id).build .query[RAddonRunConfig] .option def findByCollective( - cid: Ident, + cid: CollectiveId, enabled: Option[Boolean], trigger: Set[AddonTriggerType], configIds: Set[Ident] @@ -94,6 +94,6 @@ object RAddonRunConfig { selectConfigs.build.query[RAddonRunConfig].to[List] } - def deleteById(cid: Ident, id: Ident): ConnectionIO[Int] = + def deleteById(cid: CollectiveId, id: Ident): ConnectionIO[Int] = DML.delete(T, T.cid === cid && T.id === id) } diff --git a/modules/store/src/main/scala/docspell/store/records/RAttachment.scala b/modules/store/src/main/scala/docspell/store/records/RAttachment.scala index b1a9dcc9..7ebd4a50 100644 --- a/modules/store/src/main/scala/docspell/store/records/RAttachment.scala +++ 
b/modules/store/src/main/scala/docspell/store/records/RAttachment.scala @@ -125,7 +125,7 @@ object RAttachment { def updateName( attachId: Ident, - collective: Ident, + collective: CollectiveId, aname: Option[String] ): ConnectionIO[Int] = { val update = DML.update(T, T.id === attachId, DML.set(T.name.setTo(aname))) @@ -137,7 +137,7 @@ object RAttachment { def findByIdAndCollective( attachId: Ident, - collective: Ident + collective: CollectiveId ): ConnectionIO[Option[RAttachment]] = { val a = RAttachment.as("a") val i = RItem.as("i") @@ -153,7 +153,7 @@ object RAttachment { def existsByIdAndCollective( attachId: Ident, - collective: Ident + collective: CollectiveId ): ConnectionIO[Boolean] = { val a = RAttachment.as("a") val i = RItem.as("i") @@ -167,7 +167,7 @@ object RAttachment { def findByItemAndCollective( id: Ident, - coll: Ident + coll: CollectiveId ): ConnectionIO[Vector[RAttachment]] = { val a = RAttachment.as("a") val i = RItem.as("i") @@ -181,7 +181,7 @@ object RAttachment { def findByItemCollectiveSource( id: Ident, - coll: Ident, + coll: CollectiveId, fileIds: NonEmptyList[FileKey] ): ConnectionIO[Vector[RAttachment]] = { val i = RItem.as("i") @@ -202,7 +202,7 @@ object RAttachment { def findByItemAndCollectiveWithMeta( id: Ident, - coll: Ident + coll: CollectiveId ): ConnectionIO[Vector[(RAttachment, RFileMeta)]] = { val a = RAttachment.as("a") val m = RFileMeta.as("m") @@ -250,7 +250,7 @@ object RAttachment { } def findAll( - coll: Option[Ident], + coll: Option[CollectiveId], chunkSize: Int ): Stream[ConnectionIO, RAttachment] = { val a = RAttachment.as("a") @@ -283,7 +283,7 @@ object RAttachment { } def findWithoutPreview( - coll: Option[Ident], + coll: Option[CollectiveId], chunkSize: Int ): Stream[ConnectionIO, RAttachment] = { val a = RAttachment.as("a") @@ -299,7 +299,7 @@ object RAttachment { } def findNonConvertedPdf( - coll: Option[Ident], + coll: Option[CollectiveId], chunkSize: Int ): Stream[ConnectionIO, RAttachment] = { val pdfType = 
"application/pdf%" @@ -322,7 +322,7 @@ object RAttachment { def filterAttachments( attachments: NonEmptyList[Ident], - coll: Ident + coll: CollectiveId ): ConnectionIO[Vector[Ident]] = { val a = RAttachment.as("a") val i = RItem.as("i") diff --git a/modules/store/src/main/scala/docspell/store/records/RAttachmentArchive.scala b/modules/store/src/main/scala/docspell/store/records/RAttachmentArchive.scala index 3913c135..edda4830 100644 --- a/modules/store/src/main/scala/docspell/store/records/RAttachmentArchive.scala +++ b/modules/store/src/main/scala/docspell/store/records/RAttachmentArchive.scala @@ -64,7 +64,7 @@ object RAttachmentArchive { def findByIdAndCollective( attachId: Ident, - collective: Ident + collective: CollectiveId ): ConnectionIO[Option[RAttachmentArchive]] = { val b = RAttachment.as("b") val a = RAttachmentArchive.as("a") @@ -81,7 +81,7 @@ object RAttachmentArchive { def findByMessageIdAndCollective( messageIds: NonEmptyList[String], - collective: Ident + collective: CollectiveId ): ConnectionIO[Vector[RAttachmentArchive]] = { val b = RAttachment.as("b") val a = RAttachmentArchive.as("a") diff --git a/modules/store/src/main/scala/docspell/store/records/RAttachmentPreview.scala b/modules/store/src/main/scala/docspell/store/records/RAttachmentPreview.scala index 6ca4bc8e..c3d25ada 100644 --- a/modules/store/src/main/scala/docspell/store/records/RAttachmentPreview.scala +++ b/modules/store/src/main/scala/docspell/store/records/RAttachmentPreview.scala @@ -70,7 +70,7 @@ object RAttachmentPreview { def findByIdAndCollective( attachId: Ident, - collective: Ident + collective: CollectiveId ): ConnectionIO[Option[RAttachmentPreview]] = { val b = RAttachment.as("b") val a = RAttachmentPreview.as("a") @@ -98,7 +98,7 @@ object RAttachmentPreview { def findByItemAndCollective( itemId: Ident, - coll: Ident + coll: CollectiveId ): ConnectionIO[Option[RAttachmentPreview]] = { val s = RAttachmentPreview.as("s") val a = RAttachment.as("a") diff --git 
a/modules/store/src/main/scala/docspell/store/records/RAttachmentSource.scala b/modules/store/src/main/scala/docspell/store/records/RAttachmentSource.scala index a2e3f949..e3fdc922 100644 --- a/modules/store/src/main/scala/docspell/store/records/RAttachmentSource.scala +++ b/modules/store/src/main/scala/docspell/store/records/RAttachmentSource.scala @@ -71,7 +71,7 @@ object RAttachmentSource { def findByIdAndCollective( attachId: Ident, - collective: Ident + collective: CollectiveId ): ConnectionIO[Option[RAttachmentSource]] = { val b = RAttachment.as("b") val a = RAttachmentSource.as("a") diff --git a/modules/store/src/main/scala/docspell/store/records/RClassifierModel.scala b/modules/store/src/main/scala/docspell/store/records/RClassifierModel.scala index 89fae4df..6455a74e 100644 --- a/modules/store/src/main/scala/docspell/store/records/RClassifierModel.scala +++ b/modules/store/src/main/scala/docspell/store/records/RClassifierModel.scala @@ -19,7 +19,7 @@ import doobie.implicits._ final case class RClassifierModel( id: Ident, - cid: Ident, + cid: CollectiveId, name: String, fileId: FileKey, created: Timestamp @@ -28,7 +28,7 @@ final case class RClassifierModel( object RClassifierModel { def createNew[F[_]: Sync]( - cid: Ident, + cid: CollectiveId, name: String, fileId: FileKey ): F[RClassifierModel] = @@ -41,7 +41,7 @@ object RClassifierModel { val tableName = "classifier_model" val id = Column[Ident]("id", this) - val cid = Column[Ident]("cid", this) + val cid = Column[CollectiveId]("coll_id", this) val name = Column[String]("name", this) val fileId = Column[FileKey]("file_id", this) val created = Column[Timestamp]("created", this) @@ -61,7 +61,7 @@ object RClassifierModel { fr"${v.id},${v.cid},${v.name},${v.fileId},${v.created}" ) - def updateFile(coll: Ident, name: String, fid: FileKey): ConnectionIO[Int] = + def updateFile(coll: CollectiveId, name: String, fid: FileKey): ConnectionIO[Int] = for { now <- Timestamp.current[ConnectionIO] n <- DML.update( @@ 
-85,13 +85,16 @@ object RClassifierModel { 0.pure[ConnectionIO] } - def findByName(cid: Ident, name: String): ConnectionIO[Option[RClassifierModel]] = + def findByName( + cid: CollectiveId, + name: String + ): ConnectionIO[Option[RClassifierModel]] = Select(select(T.all), from(T), T.cid === cid && T.name === name).build .query[RClassifierModel] .option def findAllByName( - cid: Ident, + cid: CollectiveId, names: NonEmptyList[String] ): ConnectionIO[List[RClassifierModel]] = Select(select(T.all), from(T), T.cid === cid && T.name.in(names)).build @@ -99,7 +102,7 @@ object RClassifierModel { .to[List] def findAllByQuery( - cid: Ident, + cid: CollectiveId, nameQuery: String ): ConnectionIO[List[RClassifierModel]] = Select(select(T.all), from(T), T.cid === cid && T.name.like(nameQuery)).build diff --git a/modules/store/src/main/scala/docspell/store/records/RClassifierSetting.scala b/modules/store/src/main/scala/docspell/store/records/RClassifierSetting.scala index 1e908c03..f5683f8b 100644 --- a/modules/store/src/main/scala/docspell/store/records/RClassifierSetting.scala +++ b/modules/store/src/main/scala/docspell/store/records/RClassifierSetting.scala @@ -18,7 +18,7 @@ import doobie._ import doobie.implicits._ case class RClassifierSetting( - cid: Ident, + cid: CollectiveId, schedule: CalEvent, itemCount: Int, created: Timestamp, @@ -43,7 +43,7 @@ object RClassifierSetting { final case class Table(alias: Option[String]) extends TableDef { val tableName = "classifier_setting" - val cid = Column[Ident]("cid", this) + val cid = Column[CollectiveId]("coll_id", this) val schedule = Column[CalEvent]("schedule", this) val itemCount = Column[Int]("item_count", this) val created = Column[Timestamp]("created", this) @@ -79,19 +79,19 @@ object RClassifierSetting { n2 <- if (n1 <= 0) insert(v) else 0.pure[ConnectionIO] } yield n1 + n2 - def findById(id: Ident): ConnectionIO[Option[RClassifierSetting]] = { + def findById(id: CollectiveId): ConnectionIO[Option[RClassifierSetting]] = 
{ val sql = run(select(T.all), from(T), T.cid === id) sql.query[RClassifierSetting].option } - def delete(coll: Ident): ConnectionIO[Int] = + def delete(coll: CollectiveId): ConnectionIO[Int] = DML.delete(T, T.cid === coll) /** Finds tag categories that exist and match the classifier setting. If the setting * contains a black list, they are removed from the existing categories. If it is a * whitelist, the intersection is returned. */ - def getActiveCategories(coll: Ident): ConnectionIO[List[String]] = + def getActiveCategories(coll: CollectiveId): ConnectionIO[List[String]] = (for { sett <- OptionT(findById(coll)) cats <- OptionT.liftF(RTag.listCategories(coll)) @@ -106,7 +106,7 @@ object RClassifierSetting { /** Checks the json array of tag categories and removes those that are not present * anymore. */ - def fixCategoryList(coll: Ident): ConnectionIO[Int] = + def fixCategoryList(coll: CollectiveId): ConnectionIO[Int] = (for { sett <- OptionT(findById(coll)) cats <- OptionT.liftF(RTag.listCategories(coll)) @@ -131,7 +131,7 @@ object RClassifierSetting { categories.nonEmpty } - def toRecord(coll: Ident, created: Timestamp): RClassifierSetting = + def toRecord(coll: CollectiveId, created: Timestamp): RClassifierSetting = RClassifierSetting( coll, schedule, @@ -145,5 +145,4 @@ object RClassifierSetting { def fromRecord(r: RClassifierSetting): Classifier = Classifier(r.schedule, r.itemCount, r.categoryList, r.listType) } - } diff --git a/modules/store/src/main/scala/docspell/store/records/RClientSettingsCollective.scala b/modules/store/src/main/scala/docspell/store/records/RClientSettingsCollective.scala index 659fde30..ec778044 100644 --- a/modules/store/src/main/scala/docspell/store/records/RClientSettingsCollective.scala +++ b/modules/store/src/main/scala/docspell/store/records/RClientSettingsCollective.scala @@ -20,7 +20,7 @@ import io.circe.Json case class RClientSettingsCollective( id: Ident, clientId: Ident, - cid: Ident, + cid: CollectiveId, settingsData: 
Json, updated: Timestamp, created: Timestamp @@ -33,7 +33,7 @@ object RClientSettingsCollective { val id = Column[Ident]("id", this) val clientId = Column[Ident]("client_id", this) - val cid = Column[Ident]("cid", this) + val cid = Column[CollectiveId]("coll_id", this) val settingsData = Column[Json]("settings_data", this) val updated = Column[Timestamp]("updated", this) val created = Column[Timestamp]("created", this) @@ -55,7 +55,7 @@ object RClientSettingsCollective { def updateSettings( clientId: Ident, - cid: Ident, + cid: CollectiveId, data: Json, updateTs: Timestamp ): ConnectionIO[Int] = @@ -65,7 +65,7 @@ object RClientSettingsCollective { DML.set(T.settingsData.setTo(data), T.updated.setTo(updateTs)) ) - def upsert(clientId: Ident, cid: Ident, data: Json): ConnectionIO[Int] = + def upsert(clientId: Ident, cid: CollectiveId, data: Json): ConnectionIO[Int] = for { id <- Ident.randomId[ConnectionIO] now <- Timestamp.current[ConnectionIO] @@ -75,10 +75,13 @@ object RClientSettingsCollective { else 0.pure[ConnectionIO] } yield nup + nin - def delete(clientId: Ident, cid: Ident): ConnectionIO[Int] = + def delete(clientId: Ident, cid: CollectiveId): ConnectionIO[Int] = DML.delete(T, T.clientId === clientId && T.cid === cid) - def find(clientId: Ident, cid: Ident): ConnectionIO[Option[RClientSettingsCollective]] = + def find( + clientId: Ident, + cid: CollectiveId + ): ConnectionIO[Option[RClientSettingsCollective]] = run(select(T.all), from(T), T.clientId === clientId && T.cid === cid) .query[RClientSettingsCollective] .option diff --git a/modules/store/src/main/scala/docspell/store/records/RCollective.scala b/modules/store/src/main/scala/docspell/store/records/RCollective.scala index 2bbecaa6..da31fb18 100644 --- a/modules/store/src/main/scala/docspell/store/records/RCollective.scala +++ b/modules/store/src/main/scala/docspell/store/records/RCollective.scala @@ -17,7 +17,8 @@ import doobie._ import doobie.implicits._ case class RCollective( - id: Ident, + id: 
CollectiveId, + name: Ident, state: CollectiveState, language: Language, integrationEnabled: Boolean, @@ -28,17 +29,25 @@ object RCollective { final case class Table(alias: Option[String]) extends TableDef { val tableName = "collective" - val id = Column[Ident]("cid", this) + val id = Column[CollectiveId]("id", this) + val name = Column[Ident]("name", this) val state = Column[CollectiveState]("state", this) val language = Column[Language]("doclang", this) val integration = Column[Boolean]("integration_enabled", this) val created = Column[Timestamp]("created", this) - val all = NonEmptyList.of[Column[_]](id, state, language, integration, created) + val all = NonEmptyList.of[Column[_]](id, name, state, language, integration, created) } def makeDefault(collName: Ident, created: Timestamp): RCollective = - RCollective(collName, CollectiveState.Active, Language.German, true, created) + RCollective( + CollectiveId.unknown, + collName, + CollectiveState.Active, + Language.German, + true, + created + ) val T = Table(None) def as(alias: String): Table = @@ -48,25 +57,23 @@ object RCollective { DML.insert( T, T.all, - fr"${value.id},${value.state},${value.language},${value.integrationEnabled},${value.created}" + fr"${value.id},${value.name},${value.state},${value.language},${value.integrationEnabled},${value.created}" ) def update(value: RCollective): ConnectionIO[Int] = DML.update( T, T.id === value.id, - DML.set( - T.state.setTo(value.state) - ) + DML.set(T.state.setTo(value.state)) ) - def findLanguage(cid: Ident): ConnectionIO[Option[Language]] = + def findLanguage(cid: CollectiveId): ConnectionIO[Option[Language]] = Select(T.language.s, from(T), T.id === cid).build.query[Option[Language]].unique - def updateLanguage(cid: Ident, lang: Language): ConnectionIO[Int] = + def updateLanguage(cid: CollectiveId, lang: Language): ConnectionIO[Int] = DML.update(T, T.id === cid, DML.set(T.language.setTo(lang))) - def updateSettings(cid: Ident, settings: Settings): ConnectionIO[Int] 
= + def updateSettings(cid: CollectiveId, settings: Settings): ConnectionIO[Int] = for { n1 <- DML.update( T, @@ -94,7 +101,7 @@ object RCollective { // this hides categories that have been deleted in the meantime // they are finally removed from the json array once the learn classifier task is run - def getSettings(coll: Ident): ConnectionIO[Option[Settings]] = + def getSettings(coll: CollectiveId): ConnectionIO[Option[Settings]] = (for { sett <- OptionT(getRawSettings(coll)) prev <- OptionT.pure[ConnectionIO](sett.classifier) @@ -103,7 +110,7 @@ object RCollective { pws <- OptionT.liftF(RCollectivePassword.findAll(coll)) } yield sett.copy(classifier = next, passwords = pws.map(_.password))).value - private def getRawSettings(coll: Ident): ConnectionIO[Option[Settings]] = { + private def getRawSettings(coll: CollectiveId): ConnectionIO[Option[Settings]] = { import RClassifierSetting.stringListMeta val c = RCollective.as("c") @@ -127,7 +134,7 @@ object RCollective { ).build.query[Settings].option } - def findById(cid: Ident): ConnectionIO[Option[RCollective]] = { + def findById(cid: CollectiveId): ConnectionIO[Option[RCollective]] = { val sql = run(select(T.all), from(T), T.id === cid) sql.query[RCollective].option } @@ -142,7 +149,7 @@ object RCollective { ).build.query[RCollective].option } - def existsById(cid: Ident): ConnectionIO[Boolean] = { + def existsById(cid: CollectiveId): ConnectionIO[Boolean] = { val sql = Select(count(T.id).s, from(T), T.id === cid).build sql.query[Int].unique.map(_ > 0) } diff --git a/modules/store/src/main/scala/docspell/store/records/RCollectivePassword.scala b/modules/store/src/main/scala/docspell/store/records/RCollectivePassword.scala index c7931d20..cb97651c 100644 --- a/modules/store/src/main/scala/docspell/store/records/RCollectivePassword.scala +++ b/modules/store/src/main/scala/docspell/store/records/RCollectivePassword.scala @@ -19,7 +19,7 @@ import doobie.implicits._ final case class RCollectivePassword( id: Ident, - cid: 
Ident, + cid: CollectiveId, password: Password, created: Timestamp ) {} @@ -29,7 +29,7 @@ object RCollectivePassword { val tableName: String = "collective_password" val id = Column[Ident]("id", this) - val cid = Column[Ident]("cid", this) + val cid = Column[CollectiveId]("coll_id", this) val password = Column[Password]("pass", this) val created = Column[Timestamp]("created", this) @@ -41,7 +41,7 @@ object RCollectivePassword { def as(alias: String): Table = Table(Some(alias)) - def createNew[F[_]: Sync](cid: Ident, pw: Password): F[RCollectivePassword] = + def createNew[F[_]: Sync](cid: CollectiveId, pw: Password): F[RCollectivePassword] = for { id <- Ident.randomId[F] time <- Timestamp.current[F] @@ -63,15 +63,15 @@ object RCollectivePassword { def deleteById(id: Ident): ConnectionIO[Int] = DML.delete(T, T.id === id) - def deleteByPassword(cid: Ident, pw: Password): ConnectionIO[Int] = + def deleteByPassword(cid: CollectiveId, pw: Password): ConnectionIO[Int] = DML.delete(T, T.password === pw && T.cid === cid) - def findAll(cid: Ident): ConnectionIO[List[RCollectivePassword]] = + def findAll(cid: CollectiveId): ConnectionIO[List[RCollectivePassword]] = Select(select(T.all), from(T), T.cid === cid).build .query[RCollectivePassword] .to[List] - def replaceAll(cid: Ident, pws: List[Password]): ConnectionIO[Int] = + def replaceAll(cid: CollectiveId, pws: List[Password]): ConnectionIO[Int] = for { k <- DML.delete(T, T.cid === cid) pw <- pws.traverse(p => createNew[ConnectionIO](cid, p)) diff --git a/modules/store/src/main/scala/docspell/store/records/RCustomField.scala b/modules/store/src/main/scala/docspell/store/records/RCustomField.scala index 89db8192..06a70916 100644 --- a/modules/store/src/main/scala/docspell/store/records/RCustomField.scala +++ b/modules/store/src/main/scala/docspell/store/records/RCustomField.scala @@ -20,7 +20,7 @@ case class RCustomField( id: Ident, name: Ident, label: Option[String], - cid: Ident, + cid: CollectiveId, ftype: CustomFieldType, 
created: Timestamp ) @@ -32,7 +32,7 @@ object RCustomField { val id = Column[Ident]("id", this) val name = Column[Ident]("name", this) val label = Column[String]("label", this) - val cid = Column[Ident]("cid", this) + val cid = Column[CollectiveId]("coll_id", this) val ftype = Column[CustomFieldType]("ftype", this) val created = Column[Timestamp]("created", this) @@ -50,26 +50,29 @@ object RCustomField { fr"${value.id},${value.name},${value.label},${value.cid},${value.ftype},${value.created}" ) - def exists(fname: Ident, coll: Ident): ConnectionIO[Boolean] = + def exists(fname: Ident, coll: CollectiveId): ConnectionIO[Boolean] = run(select(count(T.id)), from(T), T.name === fname && T.cid === coll) .query[Int] .unique .map(_ > 0) - def findById(fid: Ident, coll: Ident): ConnectionIO[Option[RCustomField]] = + def findById(fid: Ident, coll: CollectiveId): ConnectionIO[Option[RCustomField]] = run(select(T.all), from(T), T.id === fid && T.cid === coll).query[RCustomField].option - def findByIdOrName(idOrName: Ident, coll: Ident): ConnectionIO[Option[RCustomField]] = + def findByIdOrName( + idOrName: Ident, + coll: CollectiveId + ): ConnectionIO[Option[RCustomField]] = Select( select(T.all), from(T), T.cid === coll && (T.id === idOrName || T.name === idOrName) ).build.query[RCustomField].option - def deleteById(fid: Ident, coll: Ident): ConnectionIO[Int] = + def deleteById(fid: Ident, coll: CollectiveId): ConnectionIO[Int] = DML.delete(T, T.id === fid && T.cid === coll) - def findAll(coll: Ident): ConnectionIO[Vector[RCustomField]] = + def findAll(coll: CollectiveId): ConnectionIO[Vector[RCustomField]] = run(select(T.all), from(T), T.cid === coll).query[RCustomField].to[Vector] def update(value: RCustomField): ConnectionIO[Int] = @@ -97,5 +100,4 @@ object RCustomField { ) else 0.pure[ConnectionIO] } yield n + k - } diff --git a/modules/store/src/main/scala/docspell/store/records/RDownloadQuery.scala 
b/modules/store/src/main/scala/docspell/store/records/RDownloadQuery.scala index a6bc5f4c..1e89a783 100644 --- a/modules/store/src/main/scala/docspell/store/records/RDownloadQuery.scala +++ b/modules/store/src/main/scala/docspell/store/records/RDownloadQuery.scala @@ -17,7 +17,7 @@ import doobie.implicits._ final case class RDownloadQuery( id: Ident, - cid: Ident, + cid: CollectiveId, fileId: FileKey, fileCount: Int, created: Timestamp, @@ -31,7 +31,7 @@ object RDownloadQuery { val tableName = "download_query" val id: Column[Ident] = Column("id", this) - val cid: Column[Ident] = Column("cid", this) + val cid: Column[CollectiveId] = Column("coll_id", this) val fileId: Column[FileKey] = Column("file_id", this) val fileCount: Column[Int] = Column("file_count", this) val created: Column[Timestamp] = Column("created", this) diff --git a/modules/store/src/main/scala/docspell/store/records/REmptyTrashSetting.scala b/modules/store/src/main/scala/docspell/store/records/REmptyTrashSetting.scala index 24fcb0f7..0b5cf86e 100644 --- a/modules/store/src/main/scala/docspell/store/records/REmptyTrashSetting.scala +++ b/modules/store/src/main/scala/docspell/store/records/REmptyTrashSetting.scala @@ -19,7 +19,7 @@ import doobie._ import doobie.implicits._ final case class REmptyTrashSetting( - cid: Ident, + cid: CollectiveId, schedule: CalEvent, minAge: Duration, created: Timestamp @@ -30,7 +30,7 @@ object REmptyTrashSetting { final case class Table(alias: Option[String]) extends TableDef { val tableName = "empty_trash_setting" - val cid = Column[Ident]("cid", this) + val cid = Column[CollectiveId]("coll_id", this) val schedule = Column[CalEvent]("schedule", this) val minAge = Column[Duration]("min_age", this) val created = Column[Timestamp]("created", this) @@ -61,7 +61,7 @@ object REmptyTrashSetting { n2 <- if (n1 <= 0) insert(v) else 0.pure[ConnectionIO] } yield n1 + n2 - def findById(id: Ident): ConnectionIO[Option[REmptyTrashSetting]] = { + def findById(id: CollectiveId): 
ConnectionIO[Option[REmptyTrashSetting]] = { val sql = run(select(T.all), from(T), T.cid === id) sql.query[REmptyTrashSetting].option } @@ -84,11 +84,11 @@ object REmptyTrashSetting { sql.query[REmptyTrashSetting].streamWithChunkSize(chunkSize) } - def delete(coll: Ident): ConnectionIO[Int] = + def delete(coll: CollectiveId): ConnectionIO[Int] = DML.delete(T, T.cid === coll) final case class EmptyTrash(schedule: CalEvent, minAge: Duration) { - def toRecord(coll: Ident, created: Timestamp): REmptyTrashSetting = + def toRecord(coll: CollectiveId, created: Timestamp): REmptyTrashSetting = REmptyTrashSetting(coll, schedule, minAge, created) } object EmptyTrash { diff --git a/modules/store/src/main/scala/docspell/store/records/REquipment.scala b/modules/store/src/main/scala/docspell/store/records/REquipment.scala index 14e5cc46..a03ff73c 100644 --- a/modules/store/src/main/scala/docspell/store/records/REquipment.scala +++ b/modules/store/src/main/scala/docspell/store/records/REquipment.scala @@ -17,7 +17,7 @@ import doobie.implicits._ case class REquipment( eid: Ident, - cid: Ident, + cid: CollectiveId, name: String, created: Timestamp, updated: Timestamp, @@ -30,7 +30,7 @@ object REquipment { val tableName = "equipment" val eid = Column[Ident]("eid", this) - val cid = Column[Ident]("cid", this) + val cid = Column[CollectiveId]("coll_id", this) val name = Column[String]("name", this) val created = Column[Timestamp]("created", this) val updated = Column[Timestamp]("updated", this) @@ -72,7 +72,7 @@ object REquipment { } yield n } - def existsByName(coll: Ident, ename: String): ConnectionIO[Boolean] = { + def existsByName(coll: CollectiveId, ename: String): ConnectionIO[Boolean] = { val t = Table(None) val sql = run(select(count(t.eid)), from(t), where(t.cid === coll, t.name === ename)) sql.query[Int].unique.map(_ > 0) @@ -85,7 +85,7 @@ object REquipment { } def findAll( - coll: Ident, + coll: CollectiveId, nameQ: Option[String], order: Table => NonEmptyList[OrderBy] ): 
ConnectionIO[Vector[REquipment]] = { @@ -100,7 +100,7 @@ object REquipment { } def findLike( - coll: Ident, + coll: CollectiveId, equipName: String, use: NonEmptyList[EquipmentUse] ): ConnectionIO[Vector[IdRef]] = { @@ -114,7 +114,7 @@ object REquipment { .to[Vector] } - def delete(id: Ident, coll: Ident): ConnectionIO[Int] = { + def delete(id: Ident, coll: CollectiveId): ConnectionIO[Int] = { val t = Table(None) DML.delete(t, t.eid === id && t.cid === coll) } diff --git a/modules/store/src/main/scala/docspell/store/records/RFolder.scala b/modules/store/src/main/scala/docspell/store/records/RFolder.scala index 562f324e..b4ef19b3 100644 --- a/modules/store/src/main/scala/docspell/store/records/RFolder.scala +++ b/modules/store/src/main/scala/docspell/store/records/RFolder.scala @@ -20,25 +20,29 @@ import doobie.implicits._ case class RFolder( id: Ident, name: String, - collectiveId: Ident, + collectiveId: CollectiveId, owner: Ident, created: Timestamp ) object RFolder { - def newFolder[F[_]: Sync](name: String, account: AccountId): F[RFolder] = + def newFolder[F[_]: Sync]( + name: String, + collective: CollectiveId, + user: Ident + ): F[RFolder] = for { nId <- Ident.randomId[F] now <- Timestamp.current[F] - } yield RFolder(nId, name, account.collective, account.user, now) + } yield RFolder(nId, name, collective, user, now) final case class Table(alias: Option[String]) extends TableDef { val tableName = "folder" val id = Column[Ident]("id", this) val name = Column[String]("name", this) - val collective = Column[Ident]("cid", this) + val collective = Column[CollectiveId]("coll_id", this) val owner = Column[Ident]("owner", this) val created = Column[Timestamp]("created", this) @@ -63,7 +67,7 @@ object RFolder { DML.set(T.name.setTo(v.name)) ) - def existsByName(coll: Ident, folderName: String): ConnectionIO[Boolean] = + def existsByName(coll: CollectiveId, folderName: String): ConnectionIO[Boolean] = run(select(count(T.id)), from(T), T.collective === coll && T.name === 
folderName) .query[Int] .unique @@ -77,7 +81,7 @@ object RFolder { def requireIdByIdOrName( folderId: Ident, name: String, - collective: Ident + collective: CollectiveId ): ConnectionIO[Ident] = { val sql = run( select(T.id), @@ -94,7 +98,7 @@ object RFolder { } def findAll( - coll: Ident, + coll: CollectiveId, nameQ: Option[String], order: Table => Column[_] ): ConnectionIO[Vector[RFolder]] = { diff --git a/modules/store/src/main/scala/docspell/store/records/RItem.scala b/modules/store/src/main/scala/docspell/store/records/RItem.scala index 36eadefe..e4168f64 100644 --- a/modules/store/src/main/scala/docspell/store/records/RItem.scala +++ b/modules/store/src/main/scala/docspell/store/records/RItem.scala @@ -20,7 +20,7 @@ import doobie.implicits._ case class RItem( id: Ident, - cid: Ident, + cid: CollectiveId, name: String, itemDate: Option[Timestamp], source: String, @@ -40,7 +40,7 @@ case class RItem( object RItem { def newItem[F[_]: Sync]( - cid: Ident, + cid: CollectiveId, name: String, source: String, direction: Direction, @@ -73,7 +73,7 @@ object RItem { val tableName = "item" val id = Column[Ident]("itemid", this) - val cid = Column[Ident]("cid", this) + val cid = Column[CollectiveId]("coll_id", this) val name = Column[String]("name", this) val itemDate = Column[Timestamp]("itemdate", this) val source = Column[String]("source", this) @@ -166,7 +166,7 @@ object RItem { def updateStateForCollective( itemIds: NonEmptyList[Ident], itemState: ItemState, - coll: Ident + coll: CollectiveId ): ConnectionIO[Int] = for { t <- currentTime @@ -180,7 +180,7 @@ object RItem { def restoreStateForCollective( itemIds: NonEmptyList[Ident], itemState: ItemState, - coll: Ident + coll: CollectiveId ): ConnectionIO[Int] = for { t <- currentTime @@ -193,7 +193,7 @@ object RItem { def updateDirection( itemIds: NonEmptyList[Ident], - coll: Ident, + coll: CollectiveId, dir: Direction ): ConnectionIO[Int] = for { @@ -207,7 +207,7 @@ object RItem { def updateCorrOrg( itemIds: 
NonEmptyList[Ident], - coll: Ident, + coll: CollectiveId, org: Option[Ident] ): ConnectionIO[Int] = for { @@ -219,7 +219,7 @@ object RItem { ) } yield n - def removeCorrOrg(coll: Ident, currentOrg: Ident): ConnectionIO[Int] = + def removeCorrOrg(coll: CollectiveId, currentOrg: Ident): ConnectionIO[Int] = for { t <- currentTime n <- DML.update( @@ -231,7 +231,7 @@ object RItem { def updateCorrPerson( itemIds: NonEmptyList[Ident], - coll: Ident, + coll: CollectiveId, person: Option[Ident] ): ConnectionIO[Int] = for { @@ -243,7 +243,7 @@ object RItem { ) } yield n - def removeCorrPerson(coll: Ident, currentPerson: Ident): ConnectionIO[Int] = + def removeCorrPerson(coll: CollectiveId, currentPerson: Ident): ConnectionIO[Int] = for { t <- currentTime n <- DML.update( @@ -255,7 +255,7 @@ object RItem { def updateConcPerson( itemIds: NonEmptyList[Ident], - coll: Ident, + coll: CollectiveId, person: Option[Ident] ): ConnectionIO[Int] = for { @@ -267,7 +267,7 @@ object RItem { ) } yield n - def removeConcPerson(coll: Ident, currentPerson: Ident): ConnectionIO[Int] = + def removeConcPerson(coll: CollectiveId, currentPerson: Ident): ConnectionIO[Int] = for { t <- currentTime n <- DML.update( @@ -279,7 +279,7 @@ object RItem { def updateConcEquip( itemIds: NonEmptyList[Ident], - coll: Ident, + coll: CollectiveId, equip: Option[Ident] ): ConnectionIO[Int] = for { @@ -291,7 +291,7 @@ object RItem { ) } yield n - def removeConcEquip(coll: Ident, currentEquip: Ident): ConnectionIO[Int] = + def removeConcEquip(coll: CollectiveId, currentEquip: Ident): ConnectionIO[Int] = for { t <- currentTime n <- DML.update( @@ -303,7 +303,7 @@ object RItem { def updateFolder( itemId: Ident, - coll: Ident, + coll: CollectiveId, folderIdOrName: Option[String] ): ConnectionIO[(Int, Option[Ident])] = for { @@ -321,7 +321,11 @@ object RItem { ) } yield (n, fid) - def updateNotes(itemId: Ident, coll: Ident, text: Option[String]): ConnectionIO[Int] = + def updateNotes( + itemId: Ident, + coll: 
CollectiveId, + text: Option[String] + ): ConnectionIO[Int] = for { t <- currentTime n <- DML.update( @@ -333,7 +337,7 @@ object RItem { def appendNotes( itemId: Ident, - cid: Ident, + cid: CollectiveId, text: String, sep: Option[String] ): ConnectionIO[Option[String]] = { @@ -351,7 +355,7 @@ object RItem { } } - def updateName(itemId: Ident, coll: Ident, itemName: String): ConnectionIO[Int] = + def updateName(itemId: Ident, coll: CollectiveId, itemName: String): ConnectionIO[Int] = for { t <- currentTime n <- DML.update( @@ -363,7 +367,7 @@ object RItem { def updateDate( itemIds: NonEmptyList[Ident], - coll: Ident, + coll: CollectiveId, date: Option[Timestamp] ): ConnectionIO[Int] = for { @@ -377,7 +381,7 @@ object RItem { def updateDueDate( itemIds: NonEmptyList[Ident], - coll: Ident, + coll: CollectiveId, date: Option[Timestamp] ): ConnectionIO[Int] = for { @@ -389,12 +393,12 @@ object RItem { ) } yield n - def deleteByIdAndCollective(itemId: Ident, coll: Ident): ConnectionIO[Int] = + def deleteByIdAndCollective(itemId: Ident, coll: CollectiveId): ConnectionIO[Int] = DML.delete(T, T.id === itemId && T.cid === coll) def setState( itemIds: NonEmptyList[Ident], - coll: Ident, + coll: CollectiveId, state: ItemState ): ConnectionIO[Int] = for { @@ -409,7 +413,7 @@ object RItem { def existsById(itemId: Ident): ConnectionIO[Boolean] = Select(count(T.id).s, from(T), T.id === itemId).build.query[Int].unique.map(_ > 0) - def existsByIdAndCollective(itemId: Ident, coll: Ident): ConnectionIO[Boolean] = + def existsByIdAndCollective(itemId: Ident, coll: CollectiveId): ConnectionIO[Boolean] = Select(count(T.id).s, from(T), T.id === itemId && T.cid === coll).build .query[Int] .unique @@ -417,19 +421,22 @@ object RItem { def existsByIdsAndCollective( itemIds: NonEmptyList[Ident], - coll: Ident + coll: CollectiveId ): ConnectionIO[Boolean] = Select(count(T.id).s, from(T), T.id.in(itemIds) && T.cid === coll).build .query[Int] .unique .map(_ == itemIds.size) - def 
findByIdAndCollective(itemId: Ident, coll: Ident): ConnectionIO[Option[RItem]] = + def findByIdAndCollective( + itemId: Ident, + coll: CollectiveId + ): ConnectionIO[Option[RItem]] = run(select(T.all), from(T), T.id === itemId && T.cid === coll).query[RItem].option def findAllByIdAndCollective( itemIds: NonEmptyList[Ident], - coll: Ident + coll: CollectiveId ): ConnectionIO[Vector[RItem]] = run(select(T.all), from(T), T.id.in(itemIds) && T.cid === coll) .query[RItem] @@ -439,7 +446,7 @@ object RItem { run(select(T.all), from(T), T.id === itemId).query[RItem].option def findDeleted( - collective: Ident, + collective: CollectiveId, maxUpdated: Timestamp, chunkSize: Int ): Stream[ConnectionIO, RItem] = @@ -451,7 +458,10 @@ object RItem { .query[RItem] .streamWithChunkSize(chunkSize) - def checkByIdAndCollective(itemId: Ident, coll: Ident): ConnectionIO[Option[Ident]] = + def checkByIdAndCollective( + itemId: Ident, + coll: CollectiveId + ): ConnectionIO[Option[Ident]] = Select(T.id.s, from(T), T.id === itemId && T.cid === coll).build.query[Ident].option def removeFolder(folderId: Ident): ConnectionIO[Int] = { @@ -459,9 +469,12 @@ object RItem { DML.update(T, T.folder === folderId, DML.set(T.folder.setTo(empty))) } - def filterItemsFragment(items: NonEmptyList[Ident], coll: Ident): Select = + def filterItemsFragment(items: NonEmptyList[Ident], coll: CollectiveId): Select = Select(select(T.id), from(T), T.cid === coll && T.id.in(items)) - def filterItems(items: NonEmptyList[Ident], coll: Ident): ConnectionIO[Vector[Ident]] = + def filterItems( + items: NonEmptyList[Ident], + coll: CollectiveId + ): ConnectionIO[Vector[Ident]] = filterItemsFragment(items, coll).build.query[Ident].to[Vector] } diff --git a/modules/store/src/main/scala/docspell/store/records/RItemLink.scala b/modules/store/src/main/scala/docspell/store/records/RItemLink.scala index 8270a34a..f589a679 100644 --- a/modules/store/src/main/scala/docspell/store/records/RItemLink.scala +++ 
b/modules/store/src/main/scala/docspell/store/records/RItemLink.scala @@ -20,14 +20,14 @@ import doobie.implicits._ final case class RItemLink( id: Ident, - cid: Ident, + cid: CollectiveId, item1: Ident, item2: Ident, created: Timestamp ) object RItemLink { - def create[F[_]: Sync](cid: Ident, item1: Ident, item2: Ident): F[RItemLink] = + def create[F[_]: Sync](cid: CollectiveId, item1: Ident, item2: Ident): F[RItemLink] = for { id <- Ident.randomId[F] now <- Timestamp.current[F] @@ -37,7 +37,7 @@ object RItemLink { val tableName = "item_link" val id: Column[Ident] = Column("id", this) - val cid: Column[Ident] = Column("cid", this) + val cid: Column[CollectiveId] = Column("coll_id", this) val item1: Column[Ident] = Column("item1", this) val item2: Column[Ident] = Column("item2", this) val created: Column[Timestamp] = Column("created", this) @@ -62,7 +62,7 @@ object RItemLink { DML.insertSilent(T, T.all, sql"${r.id},${r.cid},$i1,$i2,${r.created}") } - def insertNew(cid: Ident, item1: Ident, item2: Ident): ConnectionIO[Int] = + def insertNew(cid: CollectiveId, item1: Ident, item2: Ident): ConnectionIO[Int] = create[ConnectionIO](cid, item1, item2).flatMap(insert) def update(r: RItemLink): ConnectionIO[Int] = { @@ -77,7 +77,7 @@ object RItemLink { ) } - def exists(cid: Ident, item1: Ident, item2: Ident): ConnectionIO[Boolean] = { + def exists(cid: CollectiveId, item1: Ident, item2: Ident): ConnectionIO[Boolean] = { val (i1, i2) = orderIds(item1, item2) Select( select(count(T.id)), @@ -86,7 +86,7 @@ object RItemLink { ).build.query[Int].unique.map(_ > 0) } - def findLinked(cid: Ident, item: Ident): ConnectionIO[Vector[Ident]] = + def findLinked(cid: CollectiveId, item: Ident): ConnectionIO[Vector[Ident]] = union( Select( select(T.item1), @@ -101,7 +101,7 @@ object RItemLink { ).build.query[Ident].to[Vector] def deleteAll( - cid: Ident, + cid: CollectiveId, item: Ident, related: NonEmptyList[Ident] ): ConnectionIO[Int] = @@ -113,7 +113,7 @@ object RItemLink { ) ) - def 
delete(cid: Ident, item1: Ident, item2: Ident): ConnectionIO[Int] = { + def delete(cid: CollectiveId, item1: Ident, item2: Ident): ConnectionIO[Int] = { val (i1, i2) = orderIds(item1, item2) DML.delete(T, T.cid === cid && T.item1 === i1 && T.item2 === i2) } diff --git a/modules/store/src/main/scala/docspell/store/records/RNotificationChannel.scala b/modules/store/src/main/scala/docspell/store/records/RNotificationChannel.scala index 94aa89c4..4eb741f3 100644 --- a/modules/store/src/main/scala/docspell/store/records/RNotificationChannel.scala +++ b/modules/store/src/main/scala/docspell/store/records/RNotificationChannel.scala @@ -95,12 +95,12 @@ object RNotificationChannel { RNotificationChannelHttp.update ) - def getByAccount(account: AccountId): ConnectionIO[Vector[RNotificationChannel]] = + def getByAccount(userId: Ident): ConnectionIO[Vector[RNotificationChannel]] = for { - mail <- RNotificationChannelMail.getByAccount(account) - gotify <- RNotificationChannelGotify.getByAccount(account) - matrix <- RNotificationChannelMatrix.getByAccount(account) - http <- RNotificationChannelHttp.getByAccount(account) + mail <- RNotificationChannelMail.getByAccount(userId) + gotify <- RNotificationChannelGotify.getByAccount(userId) + matrix <- RNotificationChannelMatrix.getByAccount(userId) + http <- RNotificationChannelHttp.getByAccount(userId) } yield mail.map(Email.apply) ++ gotify.map(Gotify.apply) ++ matrix.map( Matrix.apply ) ++ http.map(Http.apply) @@ -177,12 +177,12 @@ object RNotificationChannel { .flatMap(_.flatTraverse(find)) } - def deleteByAccount(id: Ident, account: AccountId): ConnectionIO[Int] = + def deleteByAccount(id: Ident, userId: Ident): ConnectionIO[Int] = for { - n1 <- RNotificationChannelMail.deleteByAccount(id, account) - n2 <- RNotificationChannelGotify.deleteByAccount(id, account) - n3 <- RNotificationChannelMatrix.deleteByAccount(id, account) - n4 <- RNotificationChannelHttp.deleteByAccount(id, account) + n1 <- 
RNotificationChannelMail.deleteByAccount(id, userId) + n2 <- RNotificationChannelGotify.deleteByAccount(id, userId) + n3 <- RNotificationChannelMatrix.deleteByAccount(id, userId) + n4 <- RNotificationChannelHttp.deleteByAccount(id, userId) } yield n1 + n2 + n3 + n4 def fromChannel( diff --git a/modules/store/src/main/scala/docspell/store/records/RNotificationChannelGotify.scala b/modules/store/src/main/scala/docspell/store/records/RNotificationChannelGotify.scala index 40bb9528..c7909287 100644 --- a/modules/store/src/main/scala/docspell/store/records/RNotificationChannelGotify.scala +++ b/modules/store/src/main/scala/docspell/store/records/RNotificationChannelGotify.scala @@ -77,27 +77,21 @@ object RNotificationChannelGotify { ) def getByAccount( - account: AccountId + userId: Ident ): ConnectionIO[Vector[RNotificationChannelGotify]] = { - val user = RUser.as("u") val gotify = as("c") Select( select(gotify.all), - from(gotify).innerJoin(user, user.uid === gotify.uid), - user.cid === account.collective && user.login === account.user + from(gotify), + gotify.uid === userId ).build.query[RNotificationChannelGotify].to[Vector] } def deleteById(id: Ident): ConnectionIO[Int] = DML.delete(T, T.id === id) - def deleteByAccount(id: Ident, account: AccountId): ConnectionIO[Int] = { - val u = RUser.as("u") - DML.delete( - T, - T.id === id && T.uid.in(Select(select(u.uid), from(u), u.isAccount(account))) - ) - } + def deleteByAccount(id: Ident, userId: Ident): ConnectionIO[Int] = + DML.delete(T, T.id === id && T.uid === userId) def findRefs(ids: NonEmptyList[Ident]): Select = Select( diff --git a/modules/store/src/main/scala/docspell/store/records/RNotificationChannelHttp.scala b/modules/store/src/main/scala/docspell/store/records/RNotificationChannelHttp.scala index b06eaae7..3d9a9249 100644 --- a/modules/store/src/main/scala/docspell/store/records/RNotificationChannelHttp.scala +++ b/modules/store/src/main/scala/docspell/store/records/RNotificationChannelHttp.scala @@ 
-61,26 +61,23 @@ object RNotificationChannelHttp { DML.set(T.url.setTo(r.url), T.name.setTo(r.name)) ) - def getByAccount(account: AccountId): ConnectionIO[Vector[RNotificationChannelHttp]] = { - val user = RUser.as("u") + def getByAccount(userId: Ident): ConnectionIO[Vector[RNotificationChannelHttp]] = { val http = as("c") Select( select(http.all), - from(http).innerJoin(user, user.uid === http.uid), - user.cid === account.collective && user.login === account.user + from(http), + http.uid === userId ).build.query[RNotificationChannelHttp].to[Vector] } def deleteById(id: Ident): ConnectionIO[Int] = DML.delete(T, T.id === id) - def deleteByAccount(id: Ident, account: AccountId): ConnectionIO[Int] = { - val u = RUser.as("u") + def deleteByAccount(id: Ident, userId: Ident): ConnectionIO[Int] = DML.delete( T, - T.id === id && T.uid.in(Select(select(u.uid), from(u), u.isAccount(account))) + T.id === id && T.uid === userId ) - } def findRefs(ids: NonEmptyList[Ident]): Select = Select( diff --git a/modules/store/src/main/scala/docspell/store/records/RNotificationChannelMail.scala b/modules/store/src/main/scala/docspell/store/records/RNotificationChannelMail.scala index d73fe0c8..3309d02d 100644 --- a/modules/store/src/main/scala/docspell/store/records/RNotificationChannelMail.scala +++ b/modules/store/src/main/scala/docspell/store/records/RNotificationChannelMail.scala @@ -71,26 +71,20 @@ object RNotificationChannelMail { .query[RNotificationChannelMail] .option - def getByAccount(account: AccountId): ConnectionIO[Vector[RNotificationChannelMail]] = { - val user = RUser.as("u") - val gotify = as("c") + def getByAccount(userId: Ident): ConnectionIO[Vector[RNotificationChannelMail]] = { + val mail = as("c") Select( - select(gotify.all), - from(gotify).innerJoin(user, user.uid === gotify.uid), - user.cid === account.collective && user.login === account.user + select(mail.all), + from(mail), + mail.uid === userId ).build.query[RNotificationChannelMail].to[Vector] } def 
deleteById(id: Ident): ConnectionIO[Int] = DML.delete(T, T.id === id) - def deleteByAccount(id: Ident, account: AccountId): ConnectionIO[Int] = { - val u = RUser.as("u") - DML.delete( - T, - T.id === id && T.uid.in(Select(select(u.uid), from(u), u.isAccount(account))) - ) - } + def deleteByAccount(id: Ident, userId: Ident): ConnectionIO[Int] = + DML.delete(T, T.id === id && T.uid === userId) def findRefs(ids: NonEmptyList[Ident]): Select = Select( diff --git a/modules/store/src/main/scala/docspell/store/records/RNotificationChannelMatrix.scala b/modules/store/src/main/scala/docspell/store/records/RNotificationChannelMatrix.scala index f6965b88..ae50c3ba 100644 --- a/modules/store/src/main/scala/docspell/store/records/RNotificationChannelMatrix.scala +++ b/modules/store/src/main/scala/docspell/store/records/RNotificationChannelMatrix.scala @@ -86,27 +86,25 @@ object RNotificationChannelMatrix { .option def getByAccount( - account: AccountId + userId: Ident ): ConnectionIO[Vector[RNotificationChannelMatrix]] = { - val user = RUser.as("u") - val gotify = as("c") + + val matrix = as("c") Select( - select(gotify.all), - from(gotify).innerJoin(user, user.uid === gotify.uid), - user.cid === account.collective && user.login === account.user + select(matrix.all), + from(matrix), + matrix.uid === userId ).build.query[RNotificationChannelMatrix].to[Vector] } def deleteById(id: Ident): ConnectionIO[Int] = DML.delete(T, T.id === id) - def deleteByAccount(id: Ident, account: AccountId): ConnectionIO[Int] = { - val u = RUser.as("u") + def deleteByAccount(id: Ident, userId: Ident): ConnectionIO[Int] = DML.delete( T, - T.id === id && T.uid.in(Select(select(u.uid), from(u), u.isAccount(account))) + T.id === id && T.uid === userId ) - } def findRefs(ids: NonEmptyList[Ident]): Select = Select( diff --git a/modules/store/src/main/scala/docspell/store/records/RNotificationHook.scala b/modules/store/src/main/scala/docspell/store/records/RNotificationHook.scala index 75c22d85..da9425f5 
100644 --- a/modules/store/src/main/scala/docspell/store/records/RNotificationHook.scala +++ b/modules/store/src/main/scala/docspell/store/records/RNotificationHook.scala @@ -58,13 +58,11 @@ object RNotificationHook { sql"${r.id},${r.uid},${r.enabled},${r.allEvents},${r.eventFilter},${r.created}" ) - def deleteByAccount(id: Ident, account: AccountId): ConnectionIO[Int] = { - val u = RUser.as("u") + def deleteByAccount(id: Ident, userId: Ident): ConnectionIO[Int] = DML.delete( T, - T.id === id && T.uid.in(Select(select(u.uid), from(u), u.isAccount(account))) + T.id === id && T.uid === userId ) - } def update(r: RNotificationHook): ConnectionIO[Int] = DML.update( @@ -77,11 +75,11 @@ object RNotificationHook { ) ) - def findByAccount(account: AccountId): ConnectionIO[Vector[RNotificationHook]] = + def findByAccount(userId: Ident): ConnectionIO[Vector[RNotificationHook]] = Select( select(T.all), from(T), - T.uid.in(Select(select(RUser.T.uid), from(RUser.T), RUser.T.isAccount(account))) + T.uid === userId ).build.query[RNotificationHook].to[Vector] def getById(id: Ident, userId: Ident): ConnectionIO[Option[RNotificationHook]] = @@ -92,17 +90,15 @@ object RNotificationHook { ).build.query[RNotificationHook].option def findAllByAccount( - account: AccountId + userId: Ident ): ConnectionIO[Vector[(RNotificationHook, List[EventType])]] = { val h = RNotificationHook.as("h") val e = RNotificationHookEvent.as("e") - val userSelect = - Select(select(RUser.T.uid), from(RUser.T), RUser.T.isAccount(account)) val withEvents = Select( select(h.all :+ e.eventType), from(h).innerJoin(e, e.hookId === h.id), - h.uid.in(userSelect) + h.uid === userId ).orderBy(h.id) .build .query[(RNotificationHook, EventType)] @@ -113,7 +109,7 @@ object RNotificationHook { Select( select(h.all), from(h), - h.id.notIn(Select(select(e.hookId), from(e))) && h.uid.in(userSelect) + h.id.notIn(Select(select(e.hookId), from(e))) && h.uid === userId ).build .query[RNotificationHook] .to[Vector] diff --git 
a/modules/store/src/main/scala/docspell/store/records/ROrganization.scala b/modules/store/src/main/scala/docspell/store/records/ROrganization.scala index c1fa07b2..843cdd27 100644 --- a/modules/store/src/main/scala/docspell/store/records/ROrganization.scala +++ b/modules/store/src/main/scala/docspell/store/records/ROrganization.scala @@ -19,7 +19,7 @@ import doobie.implicits._ case class ROrganization( oid: Ident, - cid: Ident, + cid: CollectiveId, name: String, street: String, zip: String, @@ -40,7 +40,7 @@ object ROrganization { val tableName = "organization" val oid = Column[Ident]("oid", this) - val cid = Column[Ident]("cid", this) + val cid = Column[CollectiveId]("coll_id", this) val name = Column[String]("name", this) val street = Column[String]("street", this) val zip = Column[String]("zip", this) @@ -103,24 +103,24 @@ object ROrganization { } yield n } - def existsByName(coll: Ident, oname: String): ConnectionIO[Boolean] = + def existsByName(coll: CollectiveId, oname: String): ConnectionIO[Boolean] = run(select(count(T.oid)), from(T), T.cid === coll && T.name === oname) .query[Int] .unique .map(_ > 0) def findById(id: Ident): ConnectionIO[Option[ROrganization]] = { - val sql = run(select(T.all), from(T), T.cid === id) + val sql = run(select(T.all), from(T), T.oid === id) sql.query[ROrganization].option } - def find(coll: Ident, orgName: String): ConnectionIO[Option[ROrganization]] = { + def find(coll: CollectiveId, orgName: String): ConnectionIO[Option[ROrganization]] = { val sql = run(select(T.all), from(T), T.cid === coll && T.name === orgName) sql.query[ROrganization].option } def findLike( - coll: Ident, + coll: CollectiveId, orgName: String, use: Nel[OrgUse] ): ConnectionIO[Vector[IdRef]] = @@ -135,7 +135,7 @@ object ROrganization { .to[Vector] def findLike( - coll: Ident, + coll: CollectiveId, contactKind: ContactKind, value: String ): ConnectionIO[Vector[IdRef]] = { @@ -153,7 +153,7 @@ object ROrganization { } def findAll( - coll: Ident, + coll: 
CollectiveId, order: Table => Column[_] ): Stream[ConnectionIO, ROrganization] = { val sql = Select(select(T.all), from(T), T.cid === coll).orderBy(order(T)) @@ -161,7 +161,7 @@ object ROrganization { } def findAllRef( - coll: Ident, + coll: CollectiveId, nameQ: Option[String], order: Table => Nel[OrderBy] ): ConnectionIO[Vector[IdRef]] = { @@ -173,6 +173,6 @@ object ROrganization { sql.build.query[IdRef].to[Vector] } - def delete(id: Ident, coll: Ident): ConnectionIO[Int] = + def delete(id: Ident, coll: CollectiveId): ConnectionIO[Int] = DML.delete(T, T.oid === id && T.cid === coll) } diff --git a/modules/store/src/main/scala/docspell/store/records/RPerson.scala b/modules/store/src/main/scala/docspell/store/records/RPerson.scala index 4d264c2d..496ad2f6 100644 --- a/modules/store/src/main/scala/docspell/store/records/RPerson.scala +++ b/modules/store/src/main/scala/docspell/store/records/RPerson.scala @@ -20,7 +20,7 @@ import doobie.implicits._ case class RPerson( pid: Ident, - cid: Ident, + cid: CollectiveId, name: String, street: String, zip: String, @@ -41,7 +41,7 @@ object RPerson { val tableName = "person" val pid = Column[Ident]("pid", this) - val cid = Column[Ident]("cid", this) + val cid = Column[CollectiveId]("coll_id", this) val name = Column[String]("name", this) val street = Column[String]("street", this) val zip = Column[String]("zip", this) @@ -103,24 +103,24 @@ object RPerson { } yield n } - def existsByName(coll: Ident, pname: String): ConnectionIO[Boolean] = + def existsByName(coll: CollectiveId, pname: String): ConnectionIO[Boolean] = run(select(count(T.pid)), from(T), T.cid === coll && T.name === pname) .query[Int] .unique .map(_ > 0) def findById(id: Ident): ConnectionIO[Option[RPerson]] = { - val sql = run(select(T.all), from(T), T.cid === id) + val sql = run(select(T.all), from(T), T.pid === id) sql.query[RPerson].option } - def find(coll: Ident, personName: String): ConnectionIO[Option[RPerson]] = { + def find(coll: CollectiveId, personName: 
String): ConnectionIO[Option[RPerson]] = { val sql = run(select(T.all), from(T), T.cid === coll && T.name === personName) sql.query[RPerson].option } def findLike( - coll: Ident, + coll: CollectiveId, personName: String, use: Nel[PersonUse] ): ConnectionIO[Vector[IdRef]] = @@ -131,7 +131,7 @@ object RPerson { ).query[IdRef].to[Vector] def findLike( - coll: Ident, + coll: CollectiveId, contactKind: ContactKind, value: String, use: Nel[PersonUse] @@ -152,7 +152,7 @@ object RPerson { } def findAll( - coll: Ident, + coll: CollectiveId, order: Table => Column[_] ): Stream[ConnectionIO, RPerson] = { val sql = Select(select(T.all), from(T), T.cid === coll).orderBy(order(T)) @@ -160,7 +160,7 @@ object RPerson { } def findAllRef( - coll: Ident, + coll: CollectiveId, nameQ: Option[String], order: Table => Nel[OrderBy] ): ConnectionIO[Vector[IdRef]] = { @@ -172,7 +172,7 @@ object RPerson { sql.build.query[IdRef].to[Vector] } - def delete(personId: Ident, coll: Ident): ConnectionIO[Int] = + def delete(personId: Ident, coll: CollectiveId): ConnectionIO[Int] = DML.delete(T, T.pid === personId && T.cid === coll) def findOrganization(ids: Set[Ident]): ConnectionIO[Vector[PersonRef]] = diff --git a/modules/store/src/main/scala/docspell/store/records/RQueryBookmark.scala b/modules/store/src/main/scala/docspell/store/records/RQueryBookmark.scala index 08d21b71..2d18cd13 100644 --- a/modules/store/src/main/scala/docspell/store/records/RQueryBookmark.scala +++ b/modules/store/src/main/scala/docspell/store/records/RQueryBookmark.scala @@ -23,7 +23,7 @@ final case class RQueryBookmark( name: String, label: Option[String], userId: Option[Ident], - cid: Ident, + cid: CollectiveId, query: ItemQuery, created: Timestamp ) { @@ -39,13 +39,13 @@ final case class RQueryBookmark( object RQueryBookmark { final case class Table(alias: Option[String]) extends TableDef { - val tableName = "query_bookmark"; + val tableName = "query_bookmark" val id = Column[Ident]("id", this) val name = 
Column[String]("name", this) val label = Column[String]("label", this) val userId = Column[Ident]("user_id", this) - val cid = Column[Ident]("cid", this) + val cid = Column[CollectiveId]("coll_id", this) val query = Column[ItemQuery]("query", this) val created = Column[Timestamp]("created", this) @@ -59,22 +59,21 @@ object RQueryBookmark { def as(alias: String): Table = Table(Some(alias)) def createNew( - account: AccountId, + collective: CollectiveId, + userId: Option[Ident], name: String, label: Option[String], - query: ItemQuery, - personal: Boolean + query: ItemQuery ): ConnectionIO[RQueryBookmark] = for { - userId <- RUser.getIdByAccount(account) curTime <- Timestamp.current[ConnectionIO] id <- Ident.randomId[ConnectionIO] } yield RQueryBookmark( id, name, label, - if (personal) userId.some else None, - account.collective, + userId, + collective, query, curTime ) @@ -100,23 +99,20 @@ object RQueryBookmark { ) ) - def deleteById(cid: Ident, id: Ident): ConnectionIO[Int] = + def deleteById(cid: CollectiveId, id: Ident): ConnectionIO[Int] = DML.delete(T, T.id === id && T.cid === cid) - def nameExists(account: AccountId, name: String): ConnectionIO[Boolean] = { - val user = RUser.as("u") + def nameExists( + collId: CollectiveId, + userId: Ident, + name: String + ): ConnectionIO[Boolean] = { val bm = RQueryBookmark.as("bm") - val users = Select( - user.uid.s, - from(user), - user.cid === account.collective && user.login === account.user - ) Select( select(count(bm.id)), from(bm), - bm.name === name && bm.cid === account.collective && (bm.userId.isNull || bm.userId - .in(users)) + bm.name === name && bm.cid === collId && (bm.userId.isNull || bm.userId === userId) ).build.query[Int].unique.map(_ > 0) } @@ -127,13 +123,14 @@ object RQueryBookmark { // checked before (and therefore subject to race conditions, but is // neglected here) def insertIfNotExists( - account: AccountId, + collId: CollectiveId, + userId: Ident, r: ConnectionIO[RQueryBookmark] ): 
ConnectionIO[AddResult] = for { bm <- r res <- - nameExists(account, bm.name).flatMap { + nameExists(collId, userId, bm.name).flatMap { case true => AddResult .entityExists(s"A bookmark '${bm.name}' already exists.") @@ -142,39 +139,31 @@ object RQueryBookmark { } } yield res - def allForUser(account: AccountId): ConnectionIO[Vector[RQueryBookmark]] = { - val user = RUser.as("u") + def allForUser( + collId: CollectiveId, + userId: Ident + ): ConnectionIO[Vector[RQueryBookmark]] = { val bm = RQueryBookmark.as("bm") - val users = Select( - user.uid.s, - from(user), - user.cid === account.collective && user.login === account.user - ) Select( select(bm.all), from(bm), - bm.cid === account.collective && (bm.userId.isNull || bm.userId.in(users)) + bm.cid === collId && (bm.userId.isNull || bm.userId === userId) ).build.query[RQueryBookmark].to[Vector] } def findByNameOrId( - account: AccountId, + collId: CollectiveId, + userId: Ident, nameOrId: String ): ConnectionIO[Option[RQueryBookmark]] = { - val user = RUser.as("u") val bm = RQueryBookmark.as("bm") - val users = Select( - user.uid.s, - from(user), - user.cid === account.collective && user.login === account.user - ) Select( select(bm.all), from(bm), - bm.cid === account.collective && - (bm.userId.isNull || bm.userId.in(users)) && + bm.cid === collId && + (bm.userId.isNull || bm.userId === userId) && (bm.name === nameOrId || bm.id ==== nameOrId) ).build.query[RQueryBookmark].option } diff --git a/modules/store/src/main/scala/docspell/store/records/RShare.scala b/modules/store/src/main/scala/docspell/store/records/RShare.scala index 3aca8f81..196c3a00 100644 --- a/modules/store/src/main/scala/docspell/store/records/RShare.scala +++ b/modules/store/src/main/scala/docspell/store/records/RShare.scala @@ -32,7 +32,7 @@ final case class RShare( object RShare { final case class Table(alias: Option[String]) extends TableDef { - val tableName = "item_share"; + val tableName = "item_share" val id = Column[Ident]("id", this) val 
userId = Column[Ident]("user_id", this) @@ -94,7 +94,7 @@ object RShare { else Nil) ) - def findOne(id: Ident, cid: Ident): OptionT[ConnectionIO, (RShare, RUser)] = { + def findOne(id: Ident, cid: CollectiveId): OptionT[ConnectionIO, (RShare, RUser)] = { val s = RShare.as("s") val u = RUser.as("u") @@ -139,7 +139,7 @@ object RShare { }) def findOneByCollective( - cid: Ident, + cid: CollectiveId, enabled: Option[Boolean], nameOrId: String ): ConnectionIO[Option[RShare]] = { @@ -156,7 +156,7 @@ object RShare { } def findAllByCollective( - cid: Ident, + cid: CollectiveId, ownerLogin: Option[Ident], q: Option[String] ): ConnectionIO[List[(RShare, RUser)]] = { @@ -177,7 +177,7 @@ object RShare { .to[List] } - def deleteByIdAndCid(id: Ident, cid: Ident): ConnectionIO[Int] = { + def deleteByIdAndCid(id: Ident, cid: CollectiveId): ConnectionIO[Int] = { val u = RUser.T DML.delete(T, T.id === id && T.userId.in(Select(u.uid.s, from(u), u.cid === cid))) } diff --git a/modules/store/src/main/scala/docspell/store/records/RSource.scala b/modules/store/src/main/scala/docspell/store/records/RSource.scala index 629ae832..214660e9 100644 --- a/modules/store/src/main/scala/docspell/store/records/RSource.scala +++ b/modules/store/src/main/scala/docspell/store/records/RSource.scala @@ -17,7 +17,7 @@ import doobie.implicits._ case class RSource( sid: Ident, - cid: Ident, + cid: CollectiveId, abbrev: String, description: Option[String], counter: Int, @@ -40,7 +40,7 @@ object RSource { val tableName = "source" val sid = Column[Ident]("sid", this) - val cid = Column[Ident]("cid", this) + val cid = Column[CollectiveId]("coll_id", this) val abbrev = Column[String]("abbrev", this) val description = Column[String]("description", this) val counter = Column[Int]("counter", this) @@ -98,7 +98,7 @@ object RSource { ) ) - def incrementCounter(source: String, coll: Ident): ConnectionIO[Int] = + def incrementCounter(source: String, coll: CollectiveId): ConnectionIO[Int] = DML.update( table, 
where(table.abbrev === source, table.cid === coll), @@ -110,7 +110,7 @@ object RSource { sql.query[Int].unique.map(_ > 0) } - def existsByAbbrev(coll: Ident, abb: String): ConnectionIO[Boolean] = { + def existsByAbbrev(coll: CollectiveId, abb: String): ConnectionIO[Boolean] = { val sql = run( select(count(table.sid)), from(table), @@ -129,20 +129,20 @@ object RSource { run(select(table.cid), from(table), table.sid === sourceId).query[Ident].option def findAll( - coll: Ident, + coll: CollectiveId, order: Table => Column[_] ): ConnectionIO[Vector[RSource]] = findAllSql(coll, order).query[RSource].to[Vector] private[records] def findAllSql( - coll: Ident, + coll: CollectiveId, order: Table => Column[_] ): Fragment = { val t = RSource.as("s") Select(select(t.all), from(t), t.cid === coll).orderBy(order(t)).build } - def delete(sourceId: Ident, coll: Ident): ConnectionIO[Int] = + def delete(sourceId: Ident, coll: CollectiveId): ConnectionIO[Int] = DML.delete(table, where(table.sid === sourceId, table.cid === coll)) def removeFolder(folderId: Ident): ConnectionIO[Int] = { diff --git a/modules/store/src/main/scala/docspell/store/records/RTag.scala b/modules/store/src/main/scala/docspell/store/records/RTag.scala index b9e9ab27..fc66f6e8 100644 --- a/modules/store/src/main/scala/docspell/store/records/RTag.scala +++ b/modules/store/src/main/scala/docspell/store/records/RTag.scala @@ -18,7 +18,7 @@ import doobie.implicits._ case class RTag( tagId: Ident, - collective: Ident, + collective: CollectiveId, name: String, category: Option[String], created: Timestamp @@ -29,7 +29,7 @@ object RTag { val tableName = "tag" val tid = Column[Ident]("tid", this) - val cid = Column[Ident]("cid", this) + val cid = Column[CollectiveId]("coll_id", this) val name = Column[String]("name", this) val category = Column[String]("category", this) val created = Column[Timestamp]("created", this) @@ -62,7 +62,7 @@ object RTag { sql.query[RTag].option } - def findByIdAndCollective(id: Ident, coll: 
Ident): ConnectionIO[Option[RTag]] = { + def findByIdAndCollective(id: Ident, coll: CollectiveId): ConnectionIO[Option[RTag]] = { val sql = run(select(T.all), from(T), T.tid === id && T.cid === coll) sql.query[RTag].option } @@ -74,7 +74,7 @@ object RTag { } def findAll( - coll: Ident, + coll: CollectiveId, query: Option[String], order: Table => NonEmptyList[OrderBy] ): ConnectionIO[Vector[RTag]] = { @@ -121,7 +121,7 @@ object RTag { def findAllByNameOrId( nameOrIds: List[String], - coll: Ident + coll: CollectiveId ): ConnectionIO[Vector[RTag]] = { val idList = NonEmptyList.fromList(nameOrIds.flatMap(s => Ident.fromString(s).toOption)) @@ -142,7 +142,10 @@ object RTag { } } - def findOthers(coll: Ident, excludeTags: List[Ident]): ConnectionIO[List[RTag]] = { + def findOthers( + coll: CollectiveId, + excludeTags: List[Ident] + ): ConnectionIO[List[RTag]] = { val excl = NonEmptyList .fromList(excludeTags) @@ -155,14 +158,14 @@ object RTag { ).orderBy(T.name.asc).build.query[RTag].to[List] } - def listCategories(coll: Ident): ConnectionIO[List[String]] = + def listCategories(coll: CollectiveId): ConnectionIO[List[String]] = Select( T.category.s, from(T), T.cid === coll && T.category.isNotNull ).distinct.build.query[String].to[List] - def delete(tagId: Ident, coll: Ident): ConnectionIO[Int] = + def delete(tagId: Ident, coll: CollectiveId): ConnectionIO[Int] = DML.delete(T, T.tid === tagId && T.cid === coll) def sort(tags: List[RTag]): List[RTag] = diff --git a/modules/store/src/main/scala/docspell/store/records/RTagItem.scala b/modules/store/src/main/scala/docspell/store/records/RTagItem.scala index cb9467bd..0e6e627e 100644 --- a/modules/store/src/main/scala/docspell/store/records/RTagItem.scala +++ b/modules/store/src/main/scala/docspell/store/records/RTagItem.scala @@ -53,7 +53,7 @@ object RTagItem { def deleteItemTags(item: Ident): ConnectionIO[Int] = DML.delete(T, T.itemId === item) - def deleteItemTags(items: NonEmptyList[Ident], cid: Ident): ConnectionIO[Int] = 
+ def deleteItemTags(items: NonEmptyList[Ident], cid: CollectiveId): ConnectionIO[Int] = DML.delete(T, T.itemId.in(RItem.filterItemsFragment(items, cid))) def deleteTag(tid: Ident): ConnectionIO[Int] = diff --git a/modules/store/src/main/scala/docspell/store/records/RTotp.scala b/modules/store/src/main/scala/docspell/store/records/RTotp.scala index 6b942852..00148b18 100644 --- a/modules/store/src/main/scala/docspell/store/records/RTotp.scala +++ b/modules/store/src/main/scala/docspell/store/records/RTotp.scala @@ -59,24 +59,15 @@ object RTotp { ) ) - def setEnabled(account: AccountId, enabled: Boolean): ConnectionIO[Int] = - for { - userId <- RUser.findIdByAccount(account) - n <- userId match { - case Some(id) => - DML.update(T, T.userId === id, DML.set(T.enabled.setTo(enabled))) - case None => - 0.pure[ConnectionIO] - } - } yield n + def setEnabled(userId: Ident, enabled: Boolean): ConnectionIO[Int] = + DML.update(T, T.userId === userId, DML.set(T.enabled.setTo(enabled))) - def isEnabled(accountId: AccountId): ConnectionIO[Boolean] = { + def isEnabled(userId: Ident): ConnectionIO[Boolean] = { val t = RTotp.as("t") - val u = RUser.as("u") Select( select(count(t.userId)), - from(t).innerJoin(u, t.userId === u.uid), - u.login === accountId.user && u.cid === accountId.collective && t.enabled === true + from(t), + t.userId === userId && t.enabled === true ).build.query[Int].unique.map(_ > 0) } @@ -86,24 +77,45 @@ object RTotp { ): ConnectionIO[Option[RTotp]] = { val t = RTotp.as("t") val u = RUser.as("u") + val c = RCollective.as("c") Select( select(t.all), - from(t).innerJoin(u, t.userId === u.uid), - u.login === accountId.user && u.cid === accountId.collective && t.enabled === enabled + from(t).innerJoin(u, t.userId === u.uid).innerJoin(c, c.id === u.cid), + u.login === accountId.user && c.name === accountId.collective && t.enabled === enabled + ).build.query[RTotp].option + } + + def findEnabledByUserId( + userId: Ident, + enabled: Boolean + ): 
ConnectionIO[Option[RTotp]] = { + val t = RTotp.as("t") + Select( + select(t.all), + from(t), + t.userId === userId && t.enabled === enabled ).build.query[RTotp].option } def existsByLogin(accountId: AccountId): ConnectionIO[Boolean] = { val t = RTotp.as("t") val u = RUser.as("u") + val c = RCollective.as("c") Select( select(count(t.userId)), - from(t).innerJoin(u, t.userId === u.uid), - u.login === accountId.user && u.cid === accountId.collective + from(t).innerJoin(u, t.userId === u.uid).innerJoin(c, c.id === u.cid), + u.login === accountId.user && c.name === accountId.collective ).build .query[Int] .unique .map(_ > 0) } + def existsByUserId(userId: Ident): ConnectionIO[Boolean] = { + val t = RTotp.as("t") + Select(select(count(t.userId)), from(t), t.userId === userId).build + .query[Int] + .unique + .map(_ > 0) + } } diff --git a/modules/store/src/main/scala/docspell/store/records/RUser.scala b/modules/store/src/main/scala/docspell/store/records/RUser.scala index 651c99fe..1e28c6f6 100644 --- a/modules/store/src/main/scala/docspell/store/records/RUser.scala +++ b/modules/store/src/main/scala/docspell/store/records/RUser.scala @@ -7,8 +7,6 @@ package docspell.store.records import cats.data.NonEmptyList -import cats.data.OptionT -import cats.effect.Sync import docspell.common._ import docspell.store.qb.DSL._ @@ -20,7 +18,7 @@ import doobie.implicits._ case class RUser( uid: Ident, login: Ident, - cid: Ident, + cid: CollectiveId, password: Password, state: UserState, source: AccountSource, @@ -29,8 +27,6 @@ case class RUser( lastLogin: Option[Timestamp], created: Timestamp ) { - def accountId: AccountId = - AccountId(cid, login) def idRef: IdRef = IdRef(uid, login.id) @@ -41,7 +37,7 @@ object RUser { def makeDefault( id: Ident, login: Ident, - collName: Ident, + collId: CollectiveId, password: Password, source: AccountSource, created: Timestamp @@ -49,7 +45,7 @@ object RUser { RUser( id, login, - collName, + collId, password, UserState.Active, source, @@ -64,7 
+60,7 @@ object RUser { val uid = Column[Ident]("uid", this) val login = Column[Ident]("login", this) - val cid = Column[Ident]("cid", this) + val cid = Column[CollectiveId]("coll_id", this) val password = Column[Password]("password", this) val state = Column[UserState]("state", this) val source = Column[AccountSource]("account_source", this) @@ -73,9 +69,6 @@ object RUser { val lastLogin = Column[Timestamp]("lastlogin", this) val created = Column[Timestamp]("created", this) - def isAccount(aid: AccountId) = - cid === aid.collective && login === aid.user - val all = NonEmptyList.of[Column[_]]( uid, @@ -125,9 +118,14 @@ object RUser { } def findByAccount(aid: AccountId): ConnectionIO[Option[RUser]] = { - val t = Table(None) + val t = RUser.as("u") + val c = RCollective.as("c") val sql = - run(select(t.all), from(t), t.cid === aid.collective && t.login === aid.user) + run( + select(t.all), + from(t).innerJoin(c, c.id === t.cid), + c.name === aid.collective && t.login === aid.user + ) sql.query[RUser].option } @@ -137,20 +135,26 @@ object RUser { sql.query[RUser].option } - def findAll(coll: Ident, order: Table => Column[_]): ConnectionIO[Vector[RUser]] = { + def findAll( + coll: CollectiveId, + order: Table => Column[_] + ): ConnectionIO[Vector[RUser]] = { val t = Table(None) val sql = Select(select(t.all), from(t), t.cid === coll).orderBy(order(t)).build sql.query[RUser].to[Vector] } - def findIdByAccount(accountId: AccountId): ConnectionIO[Option[Ident]] = + def findIdByAccountId(accountId: AccountId): ConnectionIO[Option[Ident]] = { + val u = RUser.as("u") + val c = RCollective.as("c") run( - select(T.uid), - from(T), - T.login === accountId.user && T.cid === accountId.collective + select(u.uid), + from(u).innerJoin(c, c.id === u.cid), + u.login === accountId.user && c.name === accountId.collective ) .query[Ident] .option + } case class IdAndLogin(uid: Ident, login: Ident) def getIdByIdOrLogin(idOrLogin: Ident): ConnectionIO[Option[IdAndLogin]] = @@ -160,19 
+164,19 @@ object RUser { T.uid === idOrLogin || T.login === idOrLogin ).build.query[IdAndLogin].option - def getIdByAccount(account: AccountId): ConnectionIO[Ident] = - OptionT(findIdByAccount(account)).getOrElseF( - Sync[ConnectionIO].raiseError( - new Exception(s"No user found for: ${account.asString}") - ) - ) +// def getIdByAccount(account: AccountId): ConnectionIO[Ident] = +// OptionT(findIdByAccount(account)).getOrElseF( +// Sync[ConnectionIO].raiseError( +// new Exception(s"No user found for: ${account.asString}") +// ) +// ) - def updateLogin(accountId: AccountId): ConnectionIO[Int] = { + def updateLogin(accountId: AccountInfo): ConnectionIO[Int] = { val t = Table(None) def stmt(now: Timestamp) = DML.update( t, - t.cid === accountId.collective && t.login === accountId.user, + t.cid === accountId.collectiveId && t.login === accountId.login, DML.set( t.loginCount.increment(1), t.lastLogin.setTo(now) @@ -181,16 +185,20 @@ object RUser { Timestamp.current[ConnectionIO].flatMap(stmt) } - def updatePassword(accountId: AccountId, hashedPass: Password): ConnectionIO[Int] = { + def updatePassword( + collId: CollectiveId, + userId: Ident, + hashedPass: Password + ): ConnectionIO[Int] = { val t = Table(None) DML.update( t, - t.cid === accountId.collective && t.login === accountId.user && t.source === AccountSource.Local, + t.cid === collId && t.uid === userId && t.source === AccountSource.Local, DML.set(t.password.setTo(hashedPass)) ) } - def delete(user: Ident, coll: Ident): ConnectionIO[Int] = { + def delete(user: Ident, coll: CollectiveId): ConnectionIO[Int] = { val t = Table(None) DML.delete(t, t.cid === coll && t.login === user) } diff --git a/modules/store/src/main/scala/docspell/store/records/RUserEmail.scala b/modules/store/src/main/scala/docspell/store/records/RUserEmail.scala index a11e330a..c8b68e28 100644 --- a/modules/store/src/main/scala/docspell/store/records/RUserEmail.scala +++ b/modules/store/src/main/scala/docspell/store/records/RUserEmail.scala @@ 
-184,11 +184,10 @@ object RUserEmail { } private def findByAccount0( - accId: AccountId, + userId: Ident, nameQ: Option[String], exact: Boolean ): Query0[RUserEmail] = { - val user = RUser.as("u") val email = as("m") val nameFilter = nameQ.map(s => @@ -197,43 +196,32 @@ object RUserEmail { val sql = Select( select(email.all), - from(email).innerJoin(user, email.uid === user.uid), - user.cid === accId.collective && user.login === accId.user &&? nameFilter + from(email), + email.uid === userId &&? nameFilter ).orderBy(email.name) sql.build.query[RUserEmail] } def findByAccount( - accId: AccountId, + userId: Ident, nameQ: Option[String] ): ConnectionIO[Vector[RUserEmail]] = - findByAccount0(accId, nameQ, false).to[Vector] + findByAccount0(userId, nameQ, false).to[Vector] - def getByName(accId: AccountId, name: Ident): ConnectionIO[Option[RUserEmail]] = - findByAccount0(accId, Some(name.id), true).option + def getByName(userId: Ident, name: Ident): ConnectionIO[Option[RUserEmail]] = + findByAccount0(userId, Some(name.id), true).option def getById(id: Ident): ConnectionIO[Option[RUserEmail]] = { val t = Table(None) run(select(t.all), from(t), t.id === id).query[RUserEmail].option } - def delete(accId: AccountId, connName: Ident): ConnectionIO[Int] = { - val user = RUser.as("u") - - val subsel = Select( - select(user.uid), - from(user), - user.cid === accId.collective && user.login === accId.user - ) - + def delete(userId: Ident, connName: Ident): ConnectionIO[Int] = { val t = Table(None) - DML.delete(t, t.uid.in(subsel) && t.name === connName) + DML.delete(t, t.uid === userId && t.name === connName) } - def exists(accId: AccountId, name: Ident): ConnectionIO[Boolean] = - getByName(accId, name).map(_.isDefined) - def exists(userId: Ident, connName: Ident): ConnectionIO[Boolean] = { val t = Table(None) run(select(count(t.id)), from(t), t.uid === userId && t.name === connName) diff --git a/modules/store/src/main/scala/docspell/store/records/RUserImap.scala 
b/modules/store/src/main/scala/docspell/store/records/RUserImap.scala index a6f1885c..ae4d3265 100644 --- a/modules/store/src/main/scala/docspell/store/records/RUserImap.scala +++ b/modules/store/src/main/scala/docspell/store/records/RUserImap.scala @@ -171,12 +171,11 @@ object RUserImap { } private def findByAccount0( - accId: AccountId, + userId: Ident, nameQ: Option[String], exact: Boolean ): Query0[RUserImap] = { val m = RUserImap.as("m") - val u = RUser.as("u") val nameFilter = nameQ.map { str => @@ -186,37 +185,37 @@ object RUserImap { val sql = Select( select(m.all), - from(m).innerJoin(u, m.uid === u.uid), - u.cid === accId.collective && u.login === accId.user &&? nameFilter + from(m), + m.uid === userId &&? nameFilter ).orderBy(m.name).build sql.query[RUserImap] } def findByAccount( - accId: AccountId, + userId: Ident, nameQ: Option[String] ): ConnectionIO[Vector[RUserImap]] = - findByAccount0(accId, nameQ, false).to[Vector] + findByAccount0(userId, nameQ, false).to[Vector] - def getByName(accId: AccountId, name: Ident): ConnectionIO[Option[RUserImap]] = - findByAccount0(accId, Some(name.id), true).option + def getByName( + userId: Ident, + name: Ident + ): ConnectionIO[Option[RUserImap]] = + findByAccount0(userId, Some(name.id), true).option - def delete(accId: AccountId, connName: Ident): ConnectionIO[Int] = { + def delete( + userId: Ident, + connName: Ident + ): ConnectionIO[Int] = { val t = Table(None) - val u = RUser.as("u") - val subsel = - Select(select(u.uid), from(u), u.cid === accId.collective && u.login === accId.user) DML.delete( t, - t.uid.in(subsel) && t.name === connName + t.uid === userId && t.name === connName ) } - def exists(accId: AccountId, name: Ident): ConnectionIO[Boolean] = - getByName(accId, name).map(_.isDefined) - def exists(userId: Ident, connName: Ident): ConnectionIO[Boolean] = { val t = Table(None) run(select(count(t.id)), from(t), t.uid === userId && t.name === connName) diff --git 
a/modules/store/src/main/scala/docspell/store/records/SourceData.scala b/modules/store/src/main/scala/docspell/store/records/SourceData.scala index aee01672..27fa86e5 100644 --- a/modules/store/src/main/scala/docspell/store/records/SourceData.scala +++ b/modules/store/src/main/scala/docspell/store/records/SourceData.scala @@ -26,7 +26,7 @@ object SourceData { SourceData(s, Vector.empty) def findAll( - coll: Ident, + coll: CollectiveId, order: RSource.Table => Column[_] ): Stream[ConnectionIO, SourceData] = findAllWithTags(RSource.findAllSql(coll, order).query[RSource].stream) @@ -83,7 +83,7 @@ object SourceData { ) } yield n0 + n1.sum - def delete(source: Ident, coll: Ident): ConnectionIO[Int] = + def delete(source: Ident, coll: CollectiveId): ConnectionIO[Int] = for { n0 <- RTagSource.deleteSourceTags(source) n1 <- RSource.delete(source, coll) diff --git a/modules/store/src/test/scala/docspell/store/fts/TempFtsOpsTest.scala b/modules/store/src/test/scala/docspell/store/fts/TempFtsOpsTest.scala index 0b3c9aa5..ad081354 100644 --- a/modules/store/src/test/scala/docspell/store/fts/TempFtsOpsTest.scala +++ b/modules/store/src/test/scala/docspell/store/fts/TempFtsOpsTest.scala @@ -6,22 +6,19 @@ package docspell.store.fts -import java.time.LocalDate - +import java.time.{Instant, LocalDate} import cats.effect.IO import cats.syntax.option._ import cats.syntax.traverse._ import fs2.Stream - import docspell.common._ import docspell.ftsclient.FtsResult import docspell.ftsclient.FtsResult.{AttachmentData, ItemMatch} import docspell.store._ import docspell.store.qb.DSL._ import docspell.store.qb._ -import docspell.store.queries.{QItem, Query} -import docspell.store.records.{RCollective, RItem} - +import docspell.store.queries.{QItem, QLogin, Query} +import docspell.store.records.{RCollective, RItem, RUser} import doobie._ class TempFtsOpsTest extends DatabaseTest { @@ -60,9 +57,10 @@ class TempFtsOpsTest extends DatabaseTest { def prepareItems(store: Store[IO]) = for { - _ <- 
store.transact(RCollective.insert(makeCollective(DocspellSystem.user))) + _ <- store.transact(RCollective.insert(makeCollective(CollectiveId(2)))) + _ <- store.transact(RUser.insert(makeUser(CollectiveId(2)))) items = (0 until 200) - .map(makeItem(_, DocspellSystem.user)) + .map(makeItem(_, CollectiveId(2))) .toList _ <- items.traverse(i => store.transact(RItem.insert(i))) } yield () @@ -100,7 +98,9 @@ class TempFtsOpsTest extends DatabaseTest { def assertQueryItem(store: Store[IO], ftsResults: Stream[ConnectionIO, FtsResult]) = for { today <- IO(LocalDate.now()) - account = DocspellSystem.account + account <- store + .transact(QLogin.findUser(DocspellSystem.account)) + .map(_.get.account) tempTable = ftsResults .through(TempFtsOps.prepareTable(store.dbms, "fts_result")) .compile @@ -170,10 +170,31 @@ class TempFtsOpsTest extends DatabaseTest { } } - def makeCollective(cid: Ident): RCollective = - RCollective(cid, CollectiveState.Active, Language.English, true, ts) + def makeUser(cid: CollectiveId): RUser = + RUser( + Ident.unsafe("uid1"), + DocspellSystem.account.user, + cid, + Password("test"), + UserState.Active, + AccountSource.Local, + None, + 0, + None, + Timestamp(Instant.now) + ) - def makeItem(n: Int, cid: Ident): RItem = + def makeCollective(cid: CollectiveId): RCollective = + RCollective( + cid, + DocspellSystem.account.collective, + CollectiveState.Active, + Language.English, + true, + ts + ) + + def makeItem(n: Int, cid: CollectiveId): RItem = RItem( id(s"item-$n"), cid, diff --git a/modules/store/src/test/scala/docspell/store/generator/ItemQueryGeneratorTest.scala b/modules/store/src/test/scala/docspell/store/generator/ItemQueryGeneratorTest.scala index 9f9a9995..ea209c94 100644 --- a/modules/store/src/test/scala/docspell/store/generator/ItemQueryGeneratorTest.scala +++ b/modules/store/src/test/scala/docspell/store/generator/ItemQueryGeneratorTest.scala @@ -40,7 +40,7 @@ class ItemQueryGeneratorTest extends FunSuite { test("basic test") { val q = 
ItemQueryParser .parseUnsafe("(& name:hello date>=2020-02-01 (| source:expense* folder=test ))") - val cond = ItemQueryGenerator(now, tables, Ident.unsafe("coll"))(q) + val cond = ItemQueryGenerator(now, tables, CollectiveId(1))(q) val expect = tables.item.name.like("hello") && coalesce(tables.item.itemDate.s, tables.item.created.s) >= @@ -52,14 +52,14 @@ class ItemQueryGeneratorTest extends FunSuite { test("!conc:*") { val q = ItemQueryParser.parseUnsafe("!conc:*") - val cond = ItemQueryGenerator(now, tables, Ident.unsafe("coll"))(q) + val cond = ItemQueryGenerator(now, tables, CollectiveId(1))(q) val expect = not(tables.concPers.name.like("%") || tables.concEquip.name.like("%")) assertEquals(cond, expect) } test("attach.id with wildcard") { val q = ItemQueryParser.parseUnsafe("attach.id=abcde*") - val cond = ItemQueryGenerator(now, tables, Ident.unsafe("coll"))(q) + val cond = ItemQueryGenerator(now, tables, CollectiveId(1))(q) val expect = tables.item.id.in( Select( select(RAttachment.T.itemId), @@ -73,7 +73,7 @@ class ItemQueryGeneratorTest extends FunSuite { test("attach.id with equals") { val q = ItemQueryParser.parseUnsafe("attach.id=abcde") - val cond = ItemQueryGenerator(now, tables, Ident.unsafe("coll"))(q) + val cond = ItemQueryGenerator(now, tables, CollectiveId(1))(q) val expect = tables.item.id.in( Select( select(RAttachment.T.itemId), diff --git a/modules/store/src/test/scala/docspell/store/migrate/MigrateTest.scala b/modules/store/src/test/scala/docspell/store/migrate/MigrateTest.scala index cb34fb18..0594578a 100644 --- a/modules/store/src/test/scala/docspell/store/migrate/MigrateTest.scala +++ b/modules/store/src/test/scala/docspell/store/migrate/MigrateTest.scala @@ -7,9 +7,7 @@ package docspell.store.migrate import cats.effect._ - import docspell.store.{DatabaseTest, SchemaMigrateConfig, StoreFixture} - import org.flywaydb.core.api.output.MigrateResult class MigrateTest extends DatabaseTest { From 26d7c91266e79e1d47a31ae04070dd65d03d9d0a Mon Sep 
17 00:00:00 2001 From: eikek Date: Wed, 13 Jul 2022 23:37:46 +0200 Subject: [PATCH 03/15] Adopt modules to new collective table --- .../notification/api/EventContext.scala | 2 +- .../notification/impl/context/BasicData.scala | 2 +- .../impl/context/DeleteFieldValueCtx.scala | 4 +- .../impl/context/ItemSelectionCtx.scala | 10 +-- .../impl/context/SetFieldValueCtx.scala | 4 +- .../impl/context/TagsChangedCtx.scala | 6 +- .../docspell/scheduler/FindJobOwner.scala | 19 ++++++ .../impl/JobStoreModuleBuilder.scala | 21 +++++-- .../scheduler/impl/JobStorePublish.scala | 36 ++++++----- .../scheduler/impl/SchedulerBuilder.scala | 12 +++- .../scheduler/impl/SchedulerImpl.scala | 61 +++++++++++-------- 11 files changed, 113 insertions(+), 64 deletions(-) create mode 100644 modules/scheduler/api/src/main/scala/docspell/scheduler/FindJobOwner.scala diff --git a/modules/notification/api/src/main/scala/docspell/notification/api/EventContext.scala b/modules/notification/api/src/main/scala/docspell/notification/api/EventContext.scala index c51497ad..61fd0964 100644 --- a/modules/notification/api/src/main/scala/docspell/notification/api/EventContext.scala +++ b/modules/notification/api/src/main/scala/docspell/notification/api/EventContext.scala @@ -26,7 +26,7 @@ trait EventContext { "account" -> Json.obj( "collective" -> event.account.collective.asJson, "user" -> event.account.login.asJson, - "login" -> event.account.asJson + "login" -> event.account.asAccountId.asJson ), "content" -> content ) diff --git a/modules/notification/impl/src/main/scala/docspell/notification/impl/context/BasicData.scala b/modules/notification/impl/src/main/scala/docspell/notification/impl/context/BasicData.scala index 9c552969..94f15cdf 100644 --- a/modules/notification/impl/src/main/scala/docspell/notification/impl/context/BasicData.scala +++ b/modules/notification/impl/src/main/scala/docspell/notification/impl/context/BasicData.scala @@ -70,7 +70,7 @@ object BasicData { def find( itemIds: 
NonEmptyList[Ident], - account: AccountId, + account: AccountInfo, now: Timestamp ): ConnectionIO[Vector[Item]] = { import ItemQueryDsl._ diff --git a/modules/notification/impl/src/main/scala/docspell/notification/impl/context/DeleteFieldValueCtx.scala b/modules/notification/impl/src/main/scala/docspell/notification/impl/context/DeleteFieldValueCtx.scala index b204f823..0f87c4fb 100644 --- a/modules/notification/impl/src/main/scala/docspell/notification/impl/context/DeleteFieldValueCtx.scala +++ b/modules/notification/impl/src/main/scala/docspell/notification/impl/context/DeleteFieldValueCtx.scala @@ -46,7 +46,7 @@ object DeleteFieldValueCtx { for { now <- OptionT.liftF(Timestamp.current[ConnectionIO]) items <- OptionT.liftF(Item.find(ev.items, ev.account, now)) - field <- OptionT(RCustomField.findById(ev.field, ev.account.collective)) + field <- OptionT(RCustomField.findById(ev.field, ev.account.collectiveId)) msg = DeleteFieldValueCtx( ev, Data( @@ -71,7 +71,7 @@ object DeleteFieldValueCtx { ) final case class Data( - account: AccountId, + account: AccountInfo, items: List[Item], field: Field, itemUrl: Option[String] diff --git a/modules/notification/impl/src/main/scala/docspell/notification/impl/context/ItemSelectionCtx.scala b/modules/notification/impl/src/main/scala/docspell/notification/impl/context/ItemSelectionCtx.scala index 994ab1b9..fba4a38e 100644 --- a/modules/notification/impl/src/main/scala/docspell/notification/impl/context/ItemSelectionCtx.scala +++ b/modules/notification/impl/src/main/scala/docspell/notification/impl/context/ItemSelectionCtx.scala @@ -61,7 +61,7 @@ object ItemSelectionCtx { items.toList, ev.itemUrl, ev.more, - ev.account.user.id + ev.account.login.id ) ) } yield msg @@ -73,12 +73,12 @@ object ItemSelectionCtx { items <- ev.items.traverse(Item.sample[F]) } yield ItemSelectionCtx( ev, - Data(ev.account, items.toList, ev.itemUrl, ev.more, ev.account.user.id) + Data(ev.account, items.toList, ev.itemUrl, ev.more, ev.account.login.id) ) 
) final case class Data( - account: AccountId, + account: AccountInfo, items: List[Item], itemUrl: Option[String], more: Boolean, @@ -89,7 +89,7 @@ object ItemSelectionCtx { io.circe.generic.semiauto.deriveEncoder def create( - account: AccountId, + account: AccountInfo, items: Vector[ListItem], baseUrl: Option[LenientUri], more: Boolean, @@ -100,7 +100,7 @@ object ItemSelectionCtx { items.map(Item(now)).toList, baseUrl.map(_.asString), more, - account.user.id + account.login.id ) } diff --git a/modules/notification/impl/src/main/scala/docspell/notification/impl/context/SetFieldValueCtx.scala b/modules/notification/impl/src/main/scala/docspell/notification/impl/context/SetFieldValueCtx.scala index ebb8dc70..ced15467 100644 --- a/modules/notification/impl/src/main/scala/docspell/notification/impl/context/SetFieldValueCtx.scala +++ b/modules/notification/impl/src/main/scala/docspell/notification/impl/context/SetFieldValueCtx.scala @@ -44,7 +44,7 @@ object SetFieldValueCtx { for { now <- OptionT.liftF(Timestamp.current[ConnectionIO]) items <- OptionT.liftF(Item.find(ev.items, ev.account, now)) - field <- OptionT(RCustomField.findById(ev.field, ev.account.collective)) + field <- OptionT(RCustomField.findById(ev.field, ev.account.collectiveId)) msg = SetFieldValueCtx( ev, Data( @@ -70,7 +70,7 @@ object SetFieldValueCtx { ) final case class Data( - account: AccountId, + account: AccountInfo, items: List[Item], field: Field, value: String, diff --git a/modules/notification/impl/src/main/scala/docspell/notification/impl/context/TagsChangedCtx.scala b/modules/notification/impl/src/main/scala/docspell/notification/impl/context/TagsChangedCtx.scala index bd7aa8d4..f674ecf1 100644 --- a/modules/notification/impl/src/main/scala/docspell/notification/impl/context/TagsChangedCtx.scala +++ b/modules/notification/impl/src/main/scala/docspell/notification/impl/context/TagsChangedCtx.scala @@ -39,8 +39,8 @@ object TagsChangedCtx { def apply: Factory = EventContext.factory(ev => for { 
- tagsAdded <- RTag.findAllByNameOrId(ev.added, ev.account.collective) - tagsRemov <- RTag.findAllByNameOrId(ev.removed, ev.account.collective) + tagsAdded <- RTag.findAllByNameOrId(ev.added, ev.account.collectiveId) + tagsRemov <- RTag.findAllByNameOrId(ev.removed, ev.account.collectiveId) now <- Timestamp.current[ConnectionIO] items <- Item.find(ev.items, ev.account, now) msg = TagsChangedCtx( @@ -69,7 +69,7 @@ object TagsChangedCtx { ) final case class Data( - account: AccountId, + account: AccountInfo, items: List[Item], added: List[Tag], removed: List[Tag], diff --git a/modules/scheduler/api/src/main/scala/docspell/scheduler/FindJobOwner.scala b/modules/scheduler/api/src/main/scala/docspell/scheduler/FindJobOwner.scala new file mode 100644 index 00000000..66578f53 --- /dev/null +++ b/modules/scheduler/api/src/main/scala/docspell/scheduler/FindJobOwner.scala @@ -0,0 +1,19 @@ +package docspell.scheduler + +import cats.Applicative +import docspell.common.AccountInfo + +/** Strategy to find the user that submitted the job. This is used to emit events about + * starting/finishing jobs. + * + * If an account cannot be determined, no events can be sent.
+ */ +trait FindJobOwner[F[_]] { + def apply(job: Job[_]): F[Option[AccountInfo]] +} + +object FindJobOwner { + + def none[F[_]: Applicative]: FindJobOwner[F] = + (_: Job[_]) => Applicative[F].pure(None) +} diff --git a/modules/scheduler/impl/src/main/scala/docspell/scheduler/impl/JobStoreModuleBuilder.scala b/modules/scheduler/impl/src/main/scala/docspell/scheduler/impl/JobStoreModuleBuilder.scala index 0cab5e30..b62ae5cd 100644 --- a/modules/scheduler/impl/src/main/scala/docspell/scheduler/impl/JobStoreModuleBuilder.scala +++ b/modules/scheduler/impl/src/main/scala/docspell/scheduler/impl/JobStoreModuleBuilder.scala @@ -17,7 +17,8 @@ import docspell.store.Store case class JobStoreModuleBuilder[F[_]: Async]( store: Store[F], pubsub: PubSubT[F], - eventSink: EventSink[F] + eventSink: EventSink[F], + findJobOwner: FindJobOwner[F] ) { def withPubsub(ps: PubSubT[F]): JobStoreModuleBuilder[F] = copy(pubsub = ps) @@ -25,8 +26,11 @@ case class JobStoreModuleBuilder[F[_]: Async]( def withEventSink(es: EventSink[F]): JobStoreModuleBuilder[F] = copy(eventSink = es) + def withFindJobOwner(f: FindJobOwner[F]): JobStoreModuleBuilder[F] = + copy(findJobOwner = f) + def build: JobStoreModuleBuilder.Module[F] = { - val jobStore = JobStorePublish(store, pubsub, eventSink) + val jobStore = JobStorePublish(store, pubsub, eventSink, findJobOwner) val periodicTaskStore = PeriodicTaskStore(store, jobStore) val userTaskStore = UserTaskStoreImpl(store, periodicTaskStore) new JobStoreModuleBuilder.Module( @@ -35,7 +39,8 @@ case class JobStoreModuleBuilder[F[_]: Async]( jobStore, store, eventSink, - pubsub + pubsub, + findJobOwner ) } } @@ -43,7 +48,12 @@ case class JobStoreModuleBuilder[F[_]: Async]( object JobStoreModuleBuilder { def apply[F[_]: Async](store: Store[F]): JobStoreModuleBuilder[F] = - JobStoreModuleBuilder(store, PubSubT.noop[F], EventSink.silent[F]) + JobStoreModuleBuilder( + store, + PubSubT.noop[F], + EventSink.silent[F], + FindJobOwner.none[F] + ) final class 
Module[F[_]]( val userTasks: UserTaskStore[F], @@ -51,6 +61,7 @@ object JobStoreModuleBuilder { val jobs: JobStore[F], val store: Store[F], val eventSink: EventSink[F], - val pubSubT: PubSubT[F] + val pubSubT: PubSubT[F], + val findJobOwner: FindJobOwner[F] ) extends JobStoreModule[F] {} } diff --git a/modules/scheduler/impl/src/main/scala/docspell/scheduler/impl/JobStorePublish.scala b/modules/scheduler/impl/src/main/scala/docspell/scheduler/impl/JobStorePublish.scala index 3d88e322..4bb82b99 100644 --- a/modules/scheduler/impl/src/main/scala/docspell/scheduler/impl/JobStorePublish.scala +++ b/modules/scheduler/impl/src/main/scala/docspell/scheduler/impl/JobStorePublish.scala @@ -6,9 +6,9 @@ package docspell.scheduler.impl +import cats.data.OptionT import cats.effect._ import cats.implicits._ - import docspell.common.{Ident, JobState} import docspell.notification.api.{Event, EventSink} import docspell.pubsub.api.PubSubT @@ -19,26 +19,31 @@ import docspell.store.Store final class JobStorePublish[F[_]: Sync]( delegate: JobStore[F], pubsub: PubSubT[F], - eventSink: EventSink[F] + eventSink: EventSink[F], + findJobOwner: FindJobOwner[F] ) extends JobStore[F] { private def msg(job: Job[String]): JobSubmitted = JobSubmitted(job.id, job.group, job.task, job.args) - private def event(job: Job[String]): Event.JobSubmitted = - Event.JobSubmitted( - job.id, - job.group, - job.task, - job.args, - JobState.waiting, - job.subject, - job.submitter - ) + private def event(job: Job[String]): OptionT[F, Event.JobSubmitted] = + OptionT(findJobOwner(job)) + .map( + Event.JobSubmitted( + _, + job.id, + job.group, + job.task, + job.args, + JobState.waiting, + job.subject, + job.submitter + ) + ) private def publish(job: Job[String]): F[Unit] = pubsub.publish1(JobSubmitted.topic, msg(job)).as(()) *> - eventSink.offer(event(job)) + event(job).semiflatMap(eventSink.offer).value.void private def notifyJoex: F[Unit] = pubsub.publish1IgnoreErrors(JobsNotify(), ()).void @@ -82,7 +87,8 @@ 
object JobStorePublish { def apply[F[_]: Async]( store: Store[F], pubSub: PubSubT[F], - eventSink: EventSink[F] + eventSink: EventSink[F], + findJobOwner: FindJobOwner[F] ): JobStore[F] = - new JobStorePublish[F](JobStoreImpl(store), pubSub, eventSink) + new JobStorePublish[F](JobStoreImpl(store), pubSub, eventSink, findJobOwner) } diff --git a/modules/scheduler/impl/src/main/scala/docspell/scheduler/impl/SchedulerBuilder.scala b/modules/scheduler/impl/src/main/scala/docspell/scheduler/impl/SchedulerBuilder.scala index 6a6e0c2a..f2def0e1 100644 --- a/modules/scheduler/impl/src/main/scala/docspell/scheduler/impl/SchedulerBuilder.scala +++ b/modules/scheduler/impl/src/main/scala/docspell/scheduler/impl/SchedulerBuilder.scala @@ -23,7 +23,8 @@ case class SchedulerBuilder[F[_]: Async]( queue: JobQueue[F], logSink: LogSink[F], pubSub: PubSubT[F], - eventSink: EventSink[F] + eventSink: EventSink[F], + findJobOwner: FindJobOwner[F] ) { def withConfig(cfg: SchedulerConfig): SchedulerBuilder[F] = @@ -32,7 +33,7 @@ case class SchedulerBuilder[F[_]: Async]( def withTaskRegistry(reg: JobTaskRegistry[F]): SchedulerBuilder[F] = copy(tasks = reg) - def withTask[A](task: JobTask[F]): SchedulerBuilder[F] = + def withTask(task: JobTask[F]): SchedulerBuilder[F] = withTaskRegistry(tasks.withTask(task)) def withLogSink(sink: LogSink[F]): SchedulerBuilder[F] = @@ -47,6 +48,9 @@ case class SchedulerBuilder[F[_]: Async]( def withEventSink(sink: EventSink[F]): SchedulerBuilder[F] = copy(eventSink = sink) + def withFindJobOwner(f: FindJobOwner[F]): SchedulerBuilder[F] = + copy(findJobOwner = f) + def serve: Resource[F, Scheduler[F]] = resource.evalMap(sch => Async[F].start(sch.start.compile.drain).map(_ => sch)) @@ -60,6 +64,7 @@ case class SchedulerBuilder[F[_]: Async]( queue, pubSub, eventSink, + findJobOwner, tasks, store, logSink, @@ -86,6 +91,7 @@ object SchedulerBuilder { JobQueue(store), LogSink.db[F](store), PubSubT.noop[F], - EventSink.silent[F] + EventSink.silent[F], + 
FindJobOwner.none[F] ) } diff --git a/modules/scheduler/impl/src/main/scala/docspell/scheduler/impl/SchedulerImpl.scala b/modules/scheduler/impl/src/main/scala/docspell/scheduler/impl/SchedulerImpl.scala index db6fc5f1..d39b26d7 100644 --- a/modules/scheduler/impl/src/main/scala/docspell/scheduler/impl/SchedulerImpl.scala +++ b/modules/scheduler/impl/src/main/scala/docspell/scheduler/impl/SchedulerImpl.scala @@ -30,6 +30,7 @@ final class SchedulerImpl[F[_]: Async]( queue: JobQueue[F], pubSub: PubSubT[F], eventSink: EventSink[F], + findJobOwner: FindJobOwner[F], tasks: JobTaskRegistry[F], store: Store[F], logSink: LogSink[F], @@ -68,20 +69,19 @@ final class SchedulerImpl[F[_]: Async]( def getRunning: F[Vector[Job[String]]] = state.get .flatMap(s => QJob.findAll(s.getRunning, store)) - .map( - _.map(rj => - Job( - rj.id, - rj.task, - rj.group, - rj.args, - rj.subject, - rj.submitter, - rj.priority, - rj.tracker - ) - ) - ) + .map(_.map(convertJob)) + + private def convertJob(rj: RJob): Job[String] = + Job( + rj.id, + rj.task, + rj.group, + rj.args, + rj.subject, + rj.submitter, + rj.priority, + rj.tracker + ) def requestCancel(jobId: Ident): F[Boolean] = logger.info(s"Scheduler requested to cancel job: ${jobId.id}") *> @@ -235,22 +235,29 @@ final class SchedulerImpl[F[_]: Async]( ) ) _ <- Sync[F].whenA(JobState.isDone(finishState))( - eventSink.offer( - Event.JobDone( - job.id, - job.group, - job.task, - job.args, - job.state, - job.subject, - job.submitter, - result.json.getOrElse(Json.Null), - result.message - ) - ) + makeJobDoneEvent(job, result) + .semiflatMap(eventSink.offer) + .value ) } yield () + private def makeJobDoneEvent(job: RJob, result: JobTaskResult) = + for { + acc <- OptionT(findJobOwner(convertJob(job))) + ev = Event.JobDone( + acc, + job.id, + job.group, + job.task, + job.args, + job.state, + job.subject, + job.submitter, + result.json.getOrElse(Json.Null), + result.message + ) + } yield ev + def onStart(job: RJob): F[Unit] = QJob.setRunning( 
job.id, From 53d92c4a2609b04b1a4563eb2b84eb4e5a4c38d0 Mon Sep 17 00:00:00 2001 From: eikek Date: Thu, 4 Aug 2022 11:03:27 +0200 Subject: [PATCH 04/15] Adopt backend to collective-id --- .../scala/docspell/addons/out/NewFile.scala | 6 +- .../scala/docspell/addons/out/NewItem.scala | 4 +- .../docspell/backend/AttachedEvent.scala | 8 +- .../docspell/backend/BackendCommands.scala | 43 ++-- .../scala/docspell/backend/JobFactory.scala | 88 ++++----- .../scala/docspell/backend/auth/Login.scala | 2 +- .../backend/fulltext/CreateIndex.scala | 4 +- .../scala/docspell/backend/item/Merge.scala | 9 +- .../docspell/backend/joex/AddonOps.scala | 21 +- .../backend/joex/AddonPostProcess.scala | 42 ++-- .../docspell/backend/joex/AddonPrepare.scala | 12 +- .../backend/joex/FindJobOwnerAccount.scala | 17 ++ .../backend/ops/AddonRunConfigValidate.scala | 7 +- .../docspell/backend/ops/AddonValidate.scala | 7 +- .../scala/docspell/backend/ops/OAddons.scala | 57 +++--- .../docspell/backend/ops/OAttachment.scala | 27 ++- .../backend/ops/OClientSettings.scala | 60 +++--- .../docspell/backend/ops/OCollective.scala | 144 ++++++++------ .../docspell/backend/ops/OCustomFields.scala | 18 +- .../docspell/backend/ops/ODownloadAll.scala | 22 +-- .../docspell/backend/ops/OEquipment.scala | 19 +- .../scala/docspell/backend/ops/OFolder.scala | 65 +++--- .../docspell/backend/ops/OFulltext.scala | 8 +- .../scala/docspell/backend/ops/OItem.scala | 185 ++++++++++-------- .../docspell/backend/ops/OItemLink.scala | 31 ++- .../docspell/backend/ops/OItemSearch.scala | 54 +++-- .../scala/docspell/backend/ops/OMail.scala | 117 ++++++----- .../docspell/backend/ops/ONotification.scala | 84 ++++---- .../docspell/backend/ops/OOrganization.scala | 48 ++--- .../scala/docspell/backend/ops/OShare.scala | 45 +++-- .../scala/docspell/backend/ops/OSource.scala | 12 +- .../scala/docspell/backend/ops/OTag.scala | 14 +- .../scala/docspell/backend/ops/OUpload.scala | 58 ++++-- .../docspell/backend/ops/search/OSearch.scala | 
12 +- .../docspell/backend/signup/OSignup.scala | 115 ++++++----- .../docspell/backend/auth/AuthTokenTest.scala | 14 +- .../docspell/common/AllPreviewsArgs.scala | 2 +- .../scala/docspell/common/CollectiveId.scala | 12 ++ .../docspell/common/ConvertAllPdfArgs.scala | 2 +- .../docspell/common/EmptyTrashArgs.scala | 7 +- .../scala/docspell/common/FileCategory.scala | 2 +- .../main/scala/docspell/common/FileKey.scala | 4 +- .../main/scala/docspell/common/Ident.scala | 3 + .../docspell/common/ItemAddonTaskArgs.scala | 2 +- .../docspell/common/LearnClassifierArgs.scala | 3 +- .../docspell/common/ProcessItemArgs.scala | 5 +- .../docspell/common/ReIndexTaskArgs.scala | 4 +- .../common/ScheduledAddonTaskArgs.scala | 2 +- .../common/bc/BackendCommandRunner.scala | 6 +- .../scala/docspell/ftsclient/FtsClient.scala | 14 +- .../scala/docspell/ftsclient/FtsQuery.scala | 4 +- .../scala/docspell/ftsclient/TextData.scala | 10 +- .../db/psqlfts/V2.1.0__collective_id.sql | 33 ++++ .../scala/docspell/ftspsql/DoobieMeta.scala | 2 + .../scala/docspell/ftspsql/FtsRecord.scala | 5 +- .../docspell/ftspsql/FtsRepository.scala | 10 +- .../docspell/ftspsql/PsqlFtsClient.scala | 18 +- .../scala/docspell/ftspsql/PgFixtures.scala | 4 +- .../scala/docspell/ftssolr/JsonCodec.scala | 12 +- .../scala/docspell/ftssolr/QueryData.scala | 2 +- .../docspell/ftssolr/SolrFtsClient.scala | 10 +- .../scala/docspell/ftssolr/SolrSetup.scala | 3 +- .../scala/docspell/ftssolr/SolrUpdate.scala | 23 ++- .../scala/docspell/joex/JoexAppImpl.scala | 4 +- .../docspell/restserver/RestAppImpl.scala | 4 +- .../docspell/scheduler/FindJobOwner.scala | 9 +- .../main/scala/docspell/scheduler/Job.scala | 9 +- .../scheduler/usertask/UserTaskScope.scala | 59 ++++-- .../docspell/scheduler/impl/QUserTask.scala | 44 ++--- .../impl/SchedulerModuleBuilder.scala | 1 + .../scala/db/migration/MigrationTasks.scala | 8 +- .../db/migration/data/AllPreviewsArgs.scala | 35 ++++ .../db/migration/data/ConvertAllPdfArgs.scala | 34 ++++ 
.../db/migration/data/EmptyTrashArgs.scala | 52 +++++ .../db/migration/data/ItemAddonTaskArgs.scala | 32 +++ .../migration/data/LearnClassifierArgs.scala | 45 +++++ .../db/migration/data/ProcessItemArgs.scala | 82 ++++++++ .../db/migration/data/ReIndexTaskArgs.scala | 34 ++++ .../data/ScheduledAddonTaskArgs.scala | 26 +++ .../docspell/store/file/BinnyUtils.scala | 12 +- .../docspell/store/file/FileRepository.scala | 2 +- .../store/file/FileRepositoryImpl.scala | 4 +- .../docspell/store/file/FileUrlReader.scala | 2 +- .../scala/docspell/store/queries/QItem.scala | 2 +- .../scala/docspell/store/queries/QLogin.scala | 12 ++ .../docspell/store/records/RCollective.scala | 28 ++- .../scala/docspell/store/records/RItem.scala | 4 +- .../docspell/store/records/RSentMail.scala | 5 +- .../scala/docspell/store/records/RShare.scala | 32 +-- .../docspell/store/records/RSource.scala | 4 +- .../scala/docspell/store/records/RUser.scala | 15 +- .../docspell/store/records/RUserEmail.scala | 7 +- .../docspell/store/records/RUserImap.scala | 7 +- .../docspell/store/fts/TempFtsOpsTest.scala | 4 +- 94 files changed, 1468 insertions(+), 833 deletions(-) create mode 100644 modules/backend/src/main/scala/docspell/backend/joex/FindJobOwnerAccount.scala create mode 100644 modules/fts-psql/src/main/resources/db/psqlfts/V2.1.0__collective_id.sql create mode 100644 modules/store/src/main/scala/db/migration/data/AllPreviewsArgs.scala create mode 100644 modules/store/src/main/scala/db/migration/data/ConvertAllPdfArgs.scala create mode 100644 modules/store/src/main/scala/db/migration/data/EmptyTrashArgs.scala create mode 100644 modules/store/src/main/scala/db/migration/data/ItemAddonTaskArgs.scala create mode 100644 modules/store/src/main/scala/db/migration/data/LearnClassifierArgs.scala create mode 100644 modules/store/src/main/scala/db/migration/data/ProcessItemArgs.scala create mode 100644 modules/store/src/main/scala/db/migration/data/ReIndexTaskArgs.scala create mode 100644 
modules/store/src/main/scala/db/migration/data/ScheduledAddonTaskArgs.scala diff --git a/modules/addonlib/src/main/scala/docspell/addons/out/NewFile.scala b/modules/addonlib/src/main/scala/docspell/addons/out/NewFile.scala index 4807fe9c..86d4ed05 100644 --- a/modules/addonlib/src/main/scala/docspell/addons/out/NewFile.scala +++ b/modules/addonlib/src/main/scala/docspell/addons/out/NewFile.scala @@ -9,12 +9,10 @@ package docspell.addons.out import cats.effect.Sync import cats.syntax.all._ import fs2.io.file.{Files, Path} - import docspell.addons.out.NewFile.Meta import docspell.common.ProcessItemArgs.ProcessMeta -import docspell.common.{Ident, Language} +import docspell.common.{CollectiveId, Ident, Language} import docspell.logging.Logger - import io.circe.Codec import io.circe.generic.extras.Configuration import io.circe.generic.extras.semiauto.deriveConfiguredCodec @@ -45,7 +43,7 @@ object NewFile { ) { def toProcessMeta( - cid: Ident, + cid: CollectiveId, itemId: Ident, collLang: Option[Language], sourceAbbrev: String diff --git a/modules/addonlib/src/main/scala/docspell/addons/out/NewItem.scala b/modules/addonlib/src/main/scala/docspell/addons/out/NewItem.scala index c5511fd0..2ee79161 100644 --- a/modules/addonlib/src/main/scala/docspell/addons/out/NewItem.scala +++ b/modules/addonlib/src/main/scala/docspell/addons/out/NewItem.scala @@ -20,7 +20,7 @@ import io.circe.{Decoder, Encoder} case class NewItem(metadata: Option[Meta], files: List[String]) { def toProcessMeta( - cid: Ident, + cid: CollectiveId, collLang: Option[Language], sourceAbbrev: String ): ProcessItemArgs.ProcessMeta = @@ -62,7 +62,7 @@ object NewItem { ) { def toProcessArgs( - cid: Ident, + cid: CollectiveId, collLang: Option[Language], sourceAbbrev: String ): ProcessItemArgs.ProcessMeta = diff --git a/modules/backend/src/main/scala/docspell/backend/AttachedEvent.scala b/modules/backend/src/main/scala/docspell/backend/AttachedEvent.scala index 386e2a4a..0e214afc 100644 --- 
a/modules/backend/src/main/scala/docspell/backend/AttachedEvent.scala +++ b/modules/backend/src/main/scala/docspell/backend/AttachedEvent.scala @@ -13,7 +13,7 @@ trait AttachedEvent[R] { def value: R - def event(account: AccountId, baseUrl: Option[LenientUri]): Iterable[Event] + def event(account: AccountInfo, baseUrl: Option[LenientUri]): Iterable[Event] def map[U](f: R => U): AttachedEvent[U] } @@ -24,7 +24,7 @@ object AttachedEvent { def only[R](v: R): AttachedEvent[R] = new AttachedEvent[R] { val value = v - def event(account: AccountId, baseUrl: Option[LenientUri]): Iterable[Event] = + def event(account: AccountInfo, baseUrl: Option[LenientUri]): Iterable[Event] = Iterable.empty[Event] def map[U](f: R => U): AttachedEvent[U] = @@ -33,10 +33,10 @@ object AttachedEvent { def apply[R]( v: R - )(mkEvent: (AccountId, Option[LenientUri]) => Event): AttachedEvent[R] = + )(mkEvent: (AccountInfo, Option[LenientUri]) => Event): AttachedEvent[R] = new AttachedEvent[R] { val value = v - def event(account: AccountId, baseUrl: Option[LenientUri]): Iterable[Event] = + def event(account: AccountInfo, baseUrl: Option[LenientUri]): Iterable[Event] = Some(mkEvent(account, baseUrl)) def map[U](f: R => U): AttachedEvent[U] = diff --git a/modules/backend/src/main/scala/docspell/backend/BackendCommands.scala b/modules/backend/src/main/scala/docspell/backend/BackendCommands.scala index 3550cfbf..d663981c 100644 --- a/modules/backend/src/main/scala/docspell/backend/BackendCommands.scala +++ b/modules/backend/src/main/scala/docspell/backend/BackendCommands.scala @@ -9,12 +9,11 @@ package docspell.backend import cats.data.{NonEmptyList => Nel} import cats.effect.Sync import cats.syntax.all._ - import docspell.backend.BackendCommands.EventContext import docspell.backend.ops.OCustomFields.SetValue import docspell.backend.ops._ import docspell.common.bc._ -import docspell.common.{AccountId, Ident, LenientUri} +import docspell.common._ private[backend] class BackendCommands[F[_]: Sync]( 
itemOps: OItem[F], @@ -25,14 +24,14 @@ private[backend] class BackendCommands[F[_]: Sync]( ) extends BackendCommandRunner[F, Unit] { private[this] val logger = docspell.logging.getLogger[F] - def run(collective: Ident, cmd: BackendCommand): F[Unit] = + def run(collective: CollectiveId, cmd: BackendCommand): F[Unit] = doRun(collective, cmd).attempt.flatMap { case Right(_) => ().pure[F] case Left(ex) => - logger.error(ex)(s"Backend command $cmd failed for collective ${collective.id}.") + logger.error(ex)(s"Backend command $cmd failed for collective $collective.") } - def doRun(collective: Ident, cmd: BackendCommand): F[Unit] = + def doRun(collective: CollectiveId, cmd: BackendCommand): F[Unit] = cmd match { case BackendCommand.ItemUpdate(item, actions) => actions.traverse_(a => runItemAction(collective, item, a)) @@ -41,38 +40,38 @@ private[backend] class BackendCommands[F[_]: Sync]( actions.traverse_(a => runAttachAction(collective, item, attach, a)) } - def runAll(collective: Ident, cmds: List[BackendCommand]): F[Unit] = + def runAll(collective: CollectiveId, cmds: List[BackendCommand]): F[Unit] = cmds.traverse_(run(collective, _)) - def runItemAction(collective: Ident, item: Ident, action: ItemAction): F[Unit] = + def runItemAction(collective: CollectiveId, item: Ident, action: ItemAction): F[Unit] = action match { case ItemAction.AddTags(tags) => - logger.debug(s"Setting tags $tags on ${item.id} for ${collective.id}") *> + logger.debug(s"Setting tags $tags on ${item.id} for ${collective.value}") *> itemOps .linkTags(item, tags.toList, collective) .flatMap(sendEvents) case ItemAction.RemoveTags(tags) => - logger.debug(s"Remove tags $tags on ${item.id} for ${collective.id}") *> + logger.debug(s"Remove tags $tags on ${item.id} for ${collective.value}") *> itemOps .removeTagsMultipleItems(Nel.of(item), tags.toList, collective) .flatMap(sendEvents) case ItemAction.ReplaceTags(tags) => - logger.debug(s"Replace tags $tags on ${item.id} for ${collective.id}") *> + 
logger.debug(s"Replace tags $tags on ${item.id} for $collective") *> itemOps .setTags(item, tags.toList, collective) .flatMap(sendEvents) case ItemAction.SetFolder(folder) => - logger.debug(s"Set folder $folder on ${item.id} for ${collective.id}") *> + logger.debug(s"Set folder $folder on ${item.id} for $collective") *> itemOps .setFolder(item, folder, collective) .void case ItemAction.RemoveTagsCategory(cats) => logger.debug( - s"Remove tags in categories $cats on ${item.id} for ${collective.id}" + s"Remove tags in categories $cats on ${item.id} for $collective" ) *> itemOps .removeTagsOfCategories(item, collective, cats) @@ -80,51 +79,51 @@ private[backend] class BackendCommands[F[_]: Sync]( case ItemAction.SetCorrOrg(id) => logger.debug( - s"Set correspondent organization ${id.map(_.id)} for ${collective.id}" + s"Set correspondent organization ${id.map(_.id)} for $collective" ) *> itemOps.setCorrOrg(Nel.of(item), id, collective).void case ItemAction.SetCorrPerson(id) => logger.debug( - s"Set correspondent person ${id.map(_.id)} for ${collective.id}" + s"Set correspondent person ${id.map(_.id)} for $collective" ) *> itemOps.setCorrPerson(Nel.of(item), id, collective).void case ItemAction.SetConcPerson(id) => logger.debug( - s"Set concerning person ${id.map(_.id)} for ${collective.id}" + s"Set concerning person ${id.map(_.id)} for $collective" ) *> itemOps.setConcPerson(Nel.of(item), id, collective).void case ItemAction.SetConcEquipment(id) => logger.debug( - s"Set concerning equipment ${id.map(_.id)} for ${collective.id}" + s"Set concerning equipment ${id.map(_.id)} for $collective" ) *> itemOps.setConcEquip(Nel.of(item), id, collective).void case ItemAction.SetField(field, value) => logger.debug( - s"Set field on item ${item.id} ${field.id} to '$value' for ${collective.id}" + s"Set field on item ${item.id} ${field.id} to '$value' for $collective" ) *> fieldOps .setValue(item, SetValue(field, value, collective)) .flatMap(sendEvents) case 
ItemAction.SetNotes(notes) => - logger.debug(s"Set notes on item ${item.id} for ${collective.id}") *> + logger.debug(s"Set notes on item ${item.id} for $collective") *> itemOps.setNotes(item, notes, collective).void case ItemAction.AddNotes(notes, sep) => - logger.debug(s"Add notes on item ${item.id} for ${collective.id}") *> + logger.debug(s"Add notes on item ${item.id} for $collective") *> itemOps.addNotes(item, notes, sep, collective).void case ItemAction.SetName(name) => - logger.debug(s"Set name '$name' on item ${item.id} for ${collective.id}") *> + logger.debug(s"Set name '$name' on item ${item.id} for $collective") *> itemOps.setName(item, name, collective).void } def runAttachAction( - collective: Ident, + collective: CollectiveId, itemId: Ident, attachId: Ident, action: AttachmentAction @@ -150,7 +149,7 @@ private[backend] class BackendCommands[F[_]: Sync]( object BackendCommands { /** If supplied, notification events will be send. */ - case class EventContext(account: AccountId, baseUrl: Option[LenientUri]) + case class EventContext(account: AccountInfo, baseUrl: Option[LenientUri]) def fromBackend[F[_]: Sync]( backendApp: BackendApp[F], diff --git a/modules/backend/src/main/scala/docspell/backend/JobFactory.scala b/modules/backend/src/main/scala/docspell/backend/JobFactory.scala index cf87d783..dd54dd2b 100644 --- a/modules/backend/src/main/scala/docspell/backend/JobFactory.scala +++ b/modules/backend/src/main/scala/docspell/backend/JobFactory.scala @@ -8,24 +8,23 @@ package docspell.backend import cats.effect._ import cats.implicits._ - import docspell.backend.MailAddressCodec import docspell.backend.task.DownloadZipArgs import docspell.common._ import docspell.notification.api.PeriodicQueryArgs import docspell.scheduler.Job +import docspell.scheduler.usertask.UserTaskScope object JobFactory extends MailAddressCodec { def existingItemAddon[F[_]: Sync]( args: ItemAddonTaskArgs, - submitter: AccountId + submitter: UserTaskScope ): 
F[Job[ItemAddonTaskArgs]] = Job.createNew( ItemAddonTaskArgs.taskName, - submitter.collective, + submitter, args, "Run addons on item", - submitter.user, Priority.High, args.addonRunConfigs .map(_.take(23)) @@ -39,179 +38,167 @@ object JobFactory extends MailAddressCodec { def downloadZip[F[_]: Sync]( args: DownloadZipArgs, summaryId: Ident, - submitter: AccountId + submitter: UserTaskScope ): F[Job[DownloadZipArgs]] = Job.createNew( DownloadZipArgs.taskName, - submitter.collective, + submitter, args, s"Prepare zip file for query", - submitter.user, Priority.High, Some(summaryId) ) def integrityCheck[F[_]: Sync]( args: FileIntegrityCheckArgs, - submitter: AccountId = DocspellSystem.account + submitter: UserTaskScope = UserTaskScope.system ): F[Job[FileIntegrityCheckArgs]] = Job.createNew( FileIntegrityCheckArgs.taskName, - submitter.collective, + submitter, args, s"Check integrity of files", - submitter.user, Priority.Low, Some(FileIntegrityCheckArgs.taskName) ) def fileCopy[F[_]: Sync]( args: FileCopyTaskArgs, - submitter: AccountId = DocspellSystem.account + submitter: UserTaskScope = UserTaskScope.system ): F[Job[FileCopyTaskArgs]] = Job.createNew( FileCopyTaskArgs.taskName, - submitter.collective, + submitter, args, "Copying all files", - submitter.user, Priority.High, Some(FileCopyTaskArgs.taskName) ) def periodicQuery[F[_]: Sync]( args: PeriodicQueryArgs, - submitter: AccountId + submitter: UserTaskScope ): F[Job[PeriodicQueryArgs]] = Job.createNew( PeriodicQueryArgs.taskName, - submitter.collective, + submitter, args, s"Running periodic query, notify via ${args.channels.map(_.channelType)}", - submitter.user, Priority.Low, None ) def makePageCount[F[_]: Sync]( args: MakePageCountArgs, - account: Option[AccountId] + submitter: UserTaskScope ): F[Job[MakePageCountArgs]] = Job.createNew( MakePageCountArgs.taskName, - account.map(_.collective).getOrElse(DocspellSystem.taskGroup), + submitter, args, s"Find page-count metadata for ${args.attachment.id}", - 
account.map(_.user).getOrElse(DocspellSystem.user), Priority.Low, Some(MakePageCountArgs.taskName / args.attachment) ) def makePreview[F[_]: Sync]( args: MakePreviewArgs, - account: Option[AccountId] + submitter: UserTaskScope ): F[Job[MakePreviewArgs]] = Job.createNew( MakePreviewArgs.taskName, - account.map(_.collective).getOrElse(DocspellSystem.taskGroup), + submitter, args, s"Generate preview image", - account.map(_.user).getOrElse(DocspellSystem.user), Priority.Low, Some(MakePreviewArgs.taskName / args.attachment) ) def allPreviews[F[_]: Sync]( args: AllPreviewsArgs, - submitter: Option[Ident] + submitter: UserTaskScope ): F[Job[AllPreviewsArgs]] = Job.createNew( AllPreviewsArgs.taskName, - args.collective.getOrElse(DocspellSystem.taskGroup), + submitter, args, "Create preview images", - submitter.getOrElse(DocspellSystem.user), Priority.Low, Some(DocspellSystem.allPreviewTaskTracker) ) def convertAllPdfs[F[_]: Sync]( - collective: Option[Ident], - submitter: Option[Ident], + args: ConvertAllPdfArgs, + submitter: UserTaskScope, prio: Priority ): F[Job[ConvertAllPdfArgs]] = Job.createNew( ConvertAllPdfArgs.taskName, - collective.getOrElse(DocspellSystem.taskGroup), - ConvertAllPdfArgs(collective), + submitter, + args, s"Convert all pdfs not yet converted", - submitter.getOrElse(DocspellSystem.user), prio, - collective - .map(c => c / ConvertAllPdfArgs.taskName) + args.collective + .map(c => c.valueAsIdent / ConvertAllPdfArgs.taskName) .orElse(ConvertAllPdfArgs.taskName.some) ) def reprocessItem[F[_]: Sync]( args: ReProcessItemArgs, - account: AccountId, + submitter: UserTaskScope, prio: Priority ): F[Job[ReProcessItemArgs]] = Job.createNew( ReProcessItemArgs.taskName, - account.collective, + submitter, args, s"Re-process files of item ${args.itemId.id}", - account.user, prio, Some(ReProcessItemArgs.taskName / args.itemId) ) def multiUpload[F[_]: Sync]( args: ProcessItemArgs, - account: AccountId, + submitter: UserTaskScope, prio: Priority, tracker: 
Option[Ident] ): F[Job[ProcessItemArgs]] = Job.createNew( ProcessItemArgs.multiUploadTaskName, - account.collective, + submitter, args, args.makeSubject, - account.user, prio, tracker ) def processItem[F[_]: Sync]( args: ProcessItemArgs, - account: AccountId, + submitter: UserTaskScope, prio: Priority, tracker: Option[Ident] ): F[Job[ProcessItemArgs]] = Job.createNew( ProcessItemArgs.taskName, - account.collective, + submitter, args, args.makeSubject, - account.user, prio, tracker ) def processItems[F[_]: Sync]( args: List[ProcessItemArgs], - account: AccountId, + submitter: UserTaskScope, prio: Priority, tracker: Option[Ident] ): F[List[Job[ProcessItemArgs]]] = { def create(arg: ProcessItemArgs): F[Job[ProcessItemArgs]] = Job.createNew( ProcessItemArgs.taskName, - account.collective, + submitter, arg, arg.makeSubject, - account.user, prio, tracker ) @@ -222,22 +209,23 @@ object JobFactory extends MailAddressCodec { def reIndexAll[F[_]: Sync]: F[Job[ReIndexTaskArgs]] = Job.createNew( ReIndexTaskArgs.taskName, - DocspellSystem.taskGroup, + UserTaskScope.system, ReIndexTaskArgs(None), "Recreate full-text index", - DocspellSystem.taskGroup, Priority.Low, Some(DocspellSystem.migrationTaskTracker) ) - def reIndex[F[_]: Sync](account: AccountId): F[Job[ReIndexTaskArgs]] = { - val args = ReIndexTaskArgs(Some(account.collective)) + def reIndex[F[_]: Sync]( + cid: CollectiveId, + submitterUserId: Option[Ident] + ): F[Job[ReIndexTaskArgs]] = { + val args = ReIndexTaskArgs(Some(cid)) Job.createNew( ReIndexTaskArgs.taskName, - account.collective, + UserTaskScope(cid, submitterUserId), args, "Recreate full-text index", - account.user, Priority.Low, Some(ReIndexTaskArgs.tracker(args)) ) diff --git a/modules/backend/src/main/scala/docspell/backend/auth/Login.scala b/modules/backend/src/main/scala/docspell/backend/auth/Login.scala index 0b36f733..9d265280 100644 --- a/modules/backend/src/main/scala/docspell/backend/auth/Login.scala +++ 
b/modules/backend/src/main/scala/docspell/backend/auth/Login.scala @@ -167,7 +167,7 @@ object Login { (for { _ <- validateToken key <- EitherT.fromOptionF( - store.transact(RTotp.findEnabledByLogin(sf.token.account.userId, true)), + store.transact(RTotp.findEnabledByUserId(sf.token.account.userId, true)), Result.invalidAuth ) now <- EitherT.right[Result](Timestamp.current[F]) diff --git a/modules/backend/src/main/scala/docspell/backend/fulltext/CreateIndex.scala b/modules/backend/src/main/scala/docspell/backend/fulltext/CreateIndex.scala index fd5f9214..6d32a18d 100644 --- a/modules/backend/src/main/scala/docspell/backend/fulltext/CreateIndex.scala +++ b/modules/backend/src/main/scala/docspell/backend/fulltext/CreateIndex.scala @@ -24,7 +24,7 @@ trait CreateIndex[F[_]] { */ def reIndexData( logger: Logger[F], - collective: Option[Ident], + collective: Option[CollectiveId], itemIds: Option[NonEmptyList[Ident]], chunkSize: Int ): F[Unit] @@ -40,7 +40,7 @@ object CreateIndex { new CreateIndex[F] { def reIndexData( logger: Logger[F], - collective: Option[Ident], + collective: Option[CollectiveId], itemIds: Option[NonEmptyList[Ident]], chunkSize: Int ): F[Unit] = { diff --git a/modules/backend/src/main/scala/docspell/backend/item/Merge.scala b/modules/backend/src/main/scala/docspell/backend/item/Merge.scala index 3fdfcce9..b463f1a8 100644 --- a/modules/backend/src/main/scala/docspell/backend/item/Merge.scala +++ b/modules/backend/src/main/scala/docspell/backend/item/Merge.scala @@ -21,7 +21,7 @@ import docspell.store.queries.QCustomField.FieldValue import docspell.store.records._ trait Merge[F[_]] { - def merge(items: NonEmptyList[Ident], collective: Ident): F[Merge.Result[RItem]] + def merge(items: NonEmptyList[Ident], collective: CollectiveId): F[Merge.Result[RItem]] } object Merge { @@ -41,7 +41,10 @@ object Merge { createIndex: CreateIndex[F] ): Merge[F] = new Merge[F] { - def merge(givenIds: NonEmptyList[Ident], collective: Ident): F[Result[RItem]] = + def merge( + 
givenIds: NonEmptyList[Ident], + collective: CollectiveId + ): F[Result[RItem]] = (for { items <- loadItems(givenIds, collective) ids = items.map(_.id) @@ -65,7 +68,7 @@ object Merge { def loadItems( items: NonEmptyList[Ident], - collective: Ident + collective: CollectiveId ): EitherT[F, Error, NonEmptyList[RItem]] = { val loaded = store diff --git a/modules/backend/src/main/scala/docspell/backend/joex/AddonOps.scala b/modules/backend/src/main/scala/docspell/backend/joex/AddonOps.scala index 16d42d3b..3fe95980 100644 --- a/modules/backend/src/main/scala/docspell/backend/joex/AddonOps.scala +++ b/modules/backend/src/main/scala/docspell/backend/joex/AddonOps.scala @@ -26,7 +26,7 @@ import docspell.store.records.AddonRunConfigResolved trait AddonOps[F[_]] { def execAll( - collective: Ident, + collective: CollectiveId, trigger: Set[AddonTriggerType], runConfigIds: Set[Ident], logger: Option[Logger[F]] @@ -34,7 +34,7 @@ trait AddonOps[F[_]] { middleware: Middleware[F] ): F[ExecResult] - def execById(collective: Ident, runConfigId: Ident, logger: Logger[F])( + def execById(collective: CollectiveId, runConfigId: Ident, logger: Logger[F])( middleware: Middleware[F] ): F[ExecResult] @@ -42,13 +42,16 @@ trait AddonOps[F[_]] { * filtered by given ids and triggers. */ def findAddonRefs( - collective: Ident, + collective: CollectiveId, trigger: Set[AddonTriggerType], runConfigIds: Set[Ident] ): F[List[AddonRunConfigRef]] /** Find enabled addon run config reference given an addon task id */ - def findAddonRef(collective: Ident, runConfigId: Ident): F[Option[AddonRunConfigRef]] + def findAddonRef( + collective: CollectiveId, + runConfigId: Ident + ): F[Option[AddonRunConfigRef]] /** Creates an executor for addons given a configuration. 
*/ def getExecutor(cfg: AddonExecutorConfig): F[AddonExecutor[F]] @@ -58,7 +61,7 @@ trait AddonOps[F[_]] { object AddonOps { case class AddonRunConfigRef( id: Ident, - collective: Ident, + collective: CollectiveId, userId: Option[Ident], name: String, refs: List[AddonRef] @@ -110,7 +113,7 @@ object AddonOps { private val prepare = new AddonPrepare[F](store) def execAll( - collective: Ident, + collective: CollectiveId, trigger: Set[AddonTriggerType], runConfigIds: Set[Ident], logger: Option[Logger[F]] @@ -125,7 +128,7 @@ object AddonOps { results <- runCfgs.traverse(r => execRunConfig(log, r, custom)) } yield ExecResult(results.flatMap(_.result), runCfgs) - def execById(collective: Ident, runConfigId: Ident, logger: Logger[F])( + def execById(collective: CollectiveId, runConfigId: Ident, logger: Logger[F])( custom: Middleware[F] ): F[ExecResult] = (for { @@ -167,7 +170,7 @@ object AddonOps { Async[F].pure(AddonExecutor(cfg, urlReader)) def findAddonRefs( - collective: Ident, + collective: CollectiveId, trigger: Set[AddonTriggerType], runConfigIds: Set[Ident] ): F[List[AddonRunConfigRef]] = @@ -183,7 +186,7 @@ object AddonOps { .map(_.map(AddonRunConfigRef.fromResolved)) def findAddonRef( - collective: Ident, + collective: CollectiveId, runConfigId: Ident ): F[Option[AddonRunConfigRef]] = OptionT( diff --git a/modules/backend/src/main/scala/docspell/backend/joex/AddonPostProcess.scala b/modules/backend/src/main/scala/docspell/backend/joex/AddonPostProcess.scala index dddaee5f..26b83f85 100644 --- a/modules/backend/src/main/scala/docspell/backend/joex/AddonPostProcess.scala +++ b/modules/backend/src/main/scala/docspell/backend/joex/AddonPostProcess.scala @@ -10,7 +10,6 @@ import cats.data.OptionT import cats.effect.kernel.Sync import cats.syntax.all._ import fs2.io.file.{Files, Path} - import docspell.addons._ import docspell.addons.out.{AddonOutput, ItemFile, NewItem} import docspell.backend.JobFactory @@ -20,6 +19,7 @@ import docspell.common.bc.BackendCommandRunner 
import docspell.common.syntax.file._ import docspell.logging.Logger import docspell.scheduler.JobStore +import docspell.scheduler.usertask.UserTaskScope import docspell.store.Store import docspell.store.records._ @@ -32,7 +32,7 @@ final private[joex] class AddonPostProcess[F[_]: Sync: Files]( def onResult( logger: Logger[F], - collective: Ident, + collective: CollectiveId, result: AddonExecutionResult, outputDir: Path ): F[Unit] = @@ -45,7 +45,7 @@ final private[joex] class AddonPostProcess[F[_]: Sync: Files]( def onSuccess( logger: Logger[F], - collective: Ident, + collective: CollectiveId, output: AddonOutput, outputDir: Path ): F[Unit] = @@ -60,7 +60,7 @@ final private[joex] class AddonPostProcess[F[_]: Sync: Files]( def submitNewItem( logger: Logger[F], - collective: Ident, + collective: CollectiveId, outputDir: Path )(newItem: NewItem): F[Unit] = for { @@ -85,13 +85,17 @@ final private[joex] class AddonPostProcess[F[_]: Sync: Files]( newItem.toProcessMeta(collective, collLang, "addon"), uploaded.map(f => ProcessItemArgs.File(f._1.some, f._2)) ) - account = AccountId(collective, DocspellSystem.user) - job <- JobFactory.processItem(args, account, Priority.High, None) + job <- JobFactory.processItem( + args, + UserTaskScope.collective(collective), + Priority.High, + None + ) _ <- jobStore.insert(job.encode) _ <- logger.debug(s"Submitted job for processing: ${job.id}") } yield () - def updateOne(logger: Logger[F], collective: Ident, outputDir: Path)( + def updateOne(logger: Logger[F], collective: CollectiveId, outputDir: Path)( itemFile: ItemFile ): F[Unit] = for { @@ -123,7 +127,7 @@ final private[joex] class AddonPostProcess[F[_]: Sync: Files]( def submitNewFiles( logger: Logger[F], - collective: Ident, + collective: CollectiveId, outputDir: Path )(itemFile: ItemFile): F[Unit] = for { @@ -131,7 +135,7 @@ final private[joex] class AddonPostProcess[F[_]: Sync: Files]( collLang <- store.transact(RCollective.findLanguage(collective)) newFiles <- 
itemFile.resolveNewFiles(logger, outputDir) byMeta = newFiles.groupBy(_._1.metadata).view.mapValues(_.map(_._2)) - account = AccountId(collective, DocspellSystem.user) + submitter = UserTaskScope.collective(collective) _ <- byMeta.toList.traverse_ { case (meta, files) => for { uploaded <- files.traverse(file => @@ -151,7 +155,7 @@ final private[joex] class AddonPostProcess[F[_]: Sync: Files]( meta.toProcessMeta(collective, itemFile.itemId, collLang, "addon"), uploaded.map(f => ProcessItemArgs.File(f._1.some, f._2)) ) - job <- JobFactory.processItem(args, account, Priority.High, None) + job <- JobFactory.processItem(args, submitter, Priority.High, None) _ <- jobStore.insert(job.encode) _ <- logger.debug(s"Submitted job for processing: ${job.id}") } yield () @@ -168,19 +172,29 @@ final private[joex] class AddonPostProcess[F[_]: Sync: Files]( .semiflatMap(run) .getOrElseF(logger.warn(s"Cannot find attachment for $key to update text!")) - private def setText(collective: Ident, ra: RAttachment, readText: F[String]): F[Unit] = + private def setText( + collective: CollectiveId, + ra: RAttachment, + readText: F[String] + ): F[Unit] = attachOps.setExtractedText(collective, ra.itemId, ra.id, readText) private def replacePdf( - collective: Ident, + collective: CollectiveId, ra: RAttachment, file: Path, generatePreview: Boolean ): F[Unit] = - attachOps.addOrReplacePdf(collective, ra.id, file.readAll, generatePreview) + attachOps.addOrReplacePdf( + collective, + ra.id, + file.readAll, + generatePreview, + UserTaskScope.collective(collective) + ) private def replacePreview( - collective: Ident, + collective: CollectiveId, attachId: Ident, imageData: Path ): F[Unit] = diff --git a/modules/backend/src/main/scala/docspell/backend/joex/AddonPrepare.scala b/modules/backend/src/main/scala/docspell/backend/joex/AddonPrepare.scala index fb14e77b..87097842 100644 --- a/modules/backend/src/main/scala/docspell/backend/joex/AddonPrepare.scala +++ 
b/modules/backend/src/main/scala/docspell/backend/joex/AddonPrepare.scala @@ -9,15 +9,14 @@ package docspell.backend.joex import cats.data.{Kleisli, OptionT} import cats.effect._ import cats.syntax.all._ - import docspell.addons.Middleware import docspell.backend.auth.AuthToken import docspell.backend.joex.AddonOps.AddonRunConfigRef import docspell.common._ import docspell.logging.Logger import docspell.store.Store -import docspell.store.records.{RNode, RUser} - +import docspell.store.queries.QLogin +import docspell.store.records.RNode import scodec.bits.ByteVector private[joex] class AddonPrepare[F[_]: Sync](store: Store[F]) extends LoggerExtension { @@ -46,8 +45,7 @@ private[joex] class AddonPrepare[F[_]: Sync](store: Store[F]) extends LoggerExte ): F[Middleware[F]] = (for { userId <- OptionT.fromOption[F](runConfigRef.userId) - user <- OptionT(store.transact(RUser.getIdByIdOrLogin(userId))) - account = AccountId(runConfigRef.collective, user.login) + account <- OptionT(store.transact(QLogin.findUser(userId))).map(_.account) env = Middleware.prepare[F]( Kleisli(input => makeDscEnv(account, tokenValidity).map(input.addEnv)) @@ -58,7 +56,7 @@ private[joex] class AddonPrepare[F[_]: Sync](store: Store[F]) extends LoggerExte * Additionally a random rest-server is looked up from the database to set its url. 
*/ def makeDscEnv( - accountId: AccountId, + account: AccountInfo, tokenValidity: Duration ): F[Map[String, String]] = for { @@ -71,7 +69,7 @@ private[joex] class AddonPrepare[F[_]: Sync](store: Store[F]) extends LoggerExte secret = serverNode.flatMap(_.serverSecret) token <- AuthToken.user( - accountId, + account, false, secret.getOrElse(ByteVector.empty), tokenValidity.some diff --git a/modules/backend/src/main/scala/docspell/backend/joex/FindJobOwnerAccount.scala b/modules/backend/src/main/scala/docspell/backend/joex/FindJobOwnerAccount.scala new file mode 100644 index 00000000..500ac28b --- /dev/null +++ b/modules/backend/src/main/scala/docspell/backend/joex/FindJobOwnerAccount.scala @@ -0,0 +1,17 @@ +package docspell.backend.joex + +import docspell.common.AccountId +import docspell.scheduler.FindJobOwner +import docspell.store.Store +import docspell.store.queries.QLogin + +/** Finds the job submitter account by using the group as collective and submitter as + * login. + */ +object FindJobOwnerAccount { + def apply[F[_]](store: Store[F]): FindJobOwner[F] = + FindJobOwner.of { job => + val accountId = AccountId(job.group, job.submitter) + store.transact(QLogin.findAccount(accountId)) + } +} diff --git a/modules/backend/src/main/scala/docspell/backend/ops/AddonRunConfigValidate.scala b/modules/backend/src/main/scala/docspell/backend/ops/AddonRunConfigValidate.scala index c675c13d..f9d96762 100644 --- a/modules/backend/src/main/scala/docspell/backend/ops/AddonRunConfigValidate.scala +++ b/modules/backend/src/main/scala/docspell/backend/ops/AddonRunConfigValidate.scala @@ -9,16 +9,15 @@ package docspell.backend.ops import cats.data.NonEmptyList import cats.effect._ import cats.syntax.all._ - import docspell.backend.ops.AddonRunConfigError._ import docspell.backend.ops.OAddons.{AddonRunConfigResult, AddonRunInsert} -import docspell.common.Ident +import docspell.common.CollectiveId import docspell.store.Store import docspell.store.records.RAddonArchive object 
AddonRunConfigValidate { - def apply[F[_]: Sync](store: Store[F], cid: Ident)( + def apply[F[_]: Sync](store: Store[F], cid: CollectiveId)( cfg: AddonRunInsert ): F[AddonRunConfigResult[AddonRunInsert]] = { val init: AddonRunConfigResult[Unit] = ().asRight @@ -31,7 +30,7 @@ object AddonRunConfigValidate { .map(_.as(cfg)) } - def checkTriggers[F[_]: Sync](store: Store[F], cid: Ident)( + def checkTriggers[F[_]: Sync](store: Store[F], cid: CollectiveId)( cfg: AddonRunInsert ): F[AddonRunConfigResult[Unit]] = for { diff --git a/modules/backend/src/main/scala/docspell/backend/ops/AddonValidate.scala b/modules/backend/src/main/scala/docspell/backend/ops/AddonValidate.scala index 802d6f24..3e0a9632 100644 --- a/modules/backend/src/main/scala/docspell/backend/ops/AddonValidate.scala +++ b/modules/backend/src/main/scala/docspell/backend/ops/AddonValidate.scala @@ -11,12 +11,11 @@ import cats.effect._ import cats.syntax.all._ import fs2.Stream import fs2.io.file.Path - import docspell.addons.{AddonMeta, RunnerType} import docspell.backend.Config import docspell.backend.ops.AddonValidationError._ import docspell.backend.ops.OAddons.AddonValidationResult -import docspell.common.{Ident, LenientUri, UrlReader} +import docspell.common.{CollectiveId, LenientUri, UrlReader} import docspell.joexapi.model.AddonSupport import docspell.store.Store import docspell.store.records.RAddonArchive @@ -29,7 +28,7 @@ final class AddonValidate[F[_]: Async]( private[this] val logger = docspell.logging.getLogger[F] def fromUrl( - collective: Ident, + collective: CollectiveId, url: LenientUri, reader: UrlReader[F], localUrl: Option[LenientUri] = None, @@ -47,7 +46,7 @@ final class AddonValidate[F[_]: Async]( else archive(collective, reader(localUrl.getOrElse(url)).asRight, checkExisting) def archive( - collective: Ident, + collective: CollectiveId, addonData: Either[Path, Stream[F, Byte]], checkExisting: Boolean = true ): F[AddonValidationResult[AddonMeta]] = diff --git 
a/modules/backend/src/main/scala/docspell/backend/ops/OAddons.scala b/modules/backend/src/main/scala/docspell/backend/ops/OAddons.scala index 851f0362..4426658b 100644 --- a/modules/backend/src/main/scala/docspell/backend/ops/OAddons.scala +++ b/modules/backend/src/main/scala/docspell/backend/ops/OAddons.scala @@ -30,45 +30,46 @@ trait OAddons[F[_]] { * exists. */ def registerAddon( - collective: Ident, + collective: CollectiveId, url: LenientUri, logger: Option[Logger[F]] ): F[AddonValidationResult[(RAddonArchive, AddonMeta)]] /** Refreshes an existing addon by downloading it again and updating metadata. */ def refreshAddon( - collective: Ident, + collective: CollectiveId, addonId: Ident ): F[AddonValidationResult[(RAddonArchive, AddonMeta)]] /** Look into the addon at the given url and return its metadata. */ def inspectAddon( - collective: Ident, + collective: CollectiveId, url: LenientUri ): F[AddonValidationResult[AddonMeta]] /** Deletes the addon if it exists. */ - def deleteAddon(collective: Ident, addonId: Ident): F[Boolean] + def deleteAddon(collective: CollectiveId, addonId: Ident): F[Boolean] - def getAllAddons(collective: Ident): F[List[RAddonArchive]] + def getAllAddons(collective: CollectiveId): F[List[RAddonArchive]] /** Inserts or updates the addon run configuration. If it already exists (and the given * id is non empty), it will be completely replaced with the given one. */ def upsertAddonRunConfig( - collective: Ident, + collective: CollectiveId, runConfig: AddonRunInsert ): F[AddonRunConfigResult[Ident]] /** Deletes this task from the database. 
*/ - def deleteAddonRunConfig(collective: Ident, runConfigId: Ident): F[Boolean] + def deleteAddonRunConfig(collective: CollectiveId, runConfigId: Ident): F[Boolean] - def getAllAddonRunConfigs(collective: Ident): F[List[AddonRunInfo]] + def getAllAddonRunConfigs(collective: CollectiveId): F[List[AddonRunInfo]] def runAddonForItem( - account: AccountId, + cid: CollectiveId, itemIds: NonEmptyList[Ident], - addonRunConfigIds: Set[Ident] + addonRunConfigIds: Set[Ident], + submitter: UserTaskScope ): F[Unit] } @@ -141,7 +142,7 @@ object OAddons { private val zip = MimeType.zip.asString private val addonValidate = new AddonValidate[F](cfg, store, joex) - def getAllAddonRunConfigs(collective: Ident): F[List[AddonRunInfo]] = + def getAllAddonRunConfigs(collective: CollectiveId): F[List[AddonRunInfo]] = for { all <- store.transact(AddonRunConfigData.findAll(collective)) runConfigIDs = all.map(_.runConfig.id).toSet @@ -168,7 +169,7 @@ object OAddons { } yield result def upsertAddonRunConfig( - collective: Ident, + collective: CollectiveId, runConfig: AddonRunInsert ): F[AddonRunConfigResult[Ident]] = { val insertDataRaw = AddonRunConfigData( @@ -246,7 +247,10 @@ object OAddons { .value } - def deleteAddonRunConfig(collective: Ident, runConfigId: Ident): F[Boolean] = { + def deleteAddonRunConfig( + collective: CollectiveId, + runConfigId: Ident + ): F[Boolean] = { val deleteRunConfig = (for { e <- OptionT(RAddonRunConfig.findById(collective, runConfigId)) @@ -264,20 +268,20 @@ object OAddons { } yield deleted } - def getAllAddons(collective: Ident): F[List[RAddonArchive]] = + def getAllAddons(collective: CollectiveId): F[List[RAddonArchive]] = store.transact(RAddonArchive.listAll(collective)) - def deleteAddon(collective: Ident, addonId: Ident): F[Boolean] = + def deleteAddon(collective: CollectiveId, addonId: Ident): F[Boolean] = store.transact(RAddonArchive.deleteById(collective, addonId)).map(_ > 0) def inspectAddon( - collective: Ident, + collective: CollectiveId, url: 
LenientUri ): F[AddonValidationResult[AddonMeta]] = addonValidate.fromUrl(collective, url, urlReader, checkExisting = false) def registerAddon( - collective: Ident, + collective: CollectiveId, url: LenientUri, logger: Option[Logger[F]] ): F[AddonValidationResult[(RAddonArchive, AddonMeta)]] = { @@ -294,7 +298,9 @@ object OAddons { .as(AddonValidationResult.failure[(RAddonArchive, AddonMeta)](error)) } - log.info(s"Store addon file from '${url.asString} for ${collective.id}") *> + log.info( + s"Store addon file from '${url.asString} for collective ${collective.value}" + ) *> storeAddonFromUrl(collective, url).flatMapF { file => val localUrl = FileUrlReader.url(file) for { @@ -306,7 +312,7 @@ object OAddons { } def refreshAddon( - collective: Ident, + collective: CollectiveId, addonId: Ident ): F[AddonValidationResult[(RAddonArchive, AddonMeta)]] = { val findAddon = store @@ -371,7 +377,7 @@ object OAddons { } private def insertAddon( - collective: Ident, + collective: CollectiveId, url: LenientUri, meta: AddonMeta, file: FileKey @@ -392,7 +398,7 @@ object OAddons { .onError(_ => store.fileRepo.delete(file)) } yield record - private def storeAddonFromUrl(collective: Ident, url: LenientUri) = + private def storeAddonFromUrl(collective: CollectiveId, url: LenientUri) = for { urlFile <- EitherT.pure(url.path.segments.lastOption) file <- EitherT( @@ -412,15 +418,16 @@ object OAddons { } yield file def runAddonForItem( - account: AccountId, + cid: CollectiveId, itemIds: NonEmptyList[Ident], - addonRunConfigIds: Set[Ident] + addonRunConfigIds: Set[Ident], + submitter: UserTaskScope ): F[Unit] = for { jobs <- itemIds.traverse(id => JobFactory.existingItemAddon( - ItemAddonTaskArgs(account.collective, id, addonRunConfigIds), - account + ItemAddonTaskArgs(cid, id, addonRunConfigIds), + submitter ) ) _ <- jobStore.insertAllIfNew(jobs.map(_.encode).toList) diff --git a/modules/backend/src/main/scala/docspell/backend/ops/OAttachment.scala 
b/modules/backend/src/main/scala/docspell/backend/ops/OAttachment.scala index b5f0ddd8..80b51ba0 100644 --- a/modules/backend/src/main/scala/docspell/backend/ops/OAttachment.scala +++ b/modules/backend/src/main/scala/docspell/backend/ops/OAttachment.scala @@ -10,13 +10,13 @@ import cats.data.{NonEmptyList => Nel, OptionT} import cats.effect._ import cats.syntax.all._ import fs2.Stream - import docspell.backend.JobFactory import docspell.common.MakePreviewArgs.StoreMode import docspell.common._ import docspell.files.TikaMimetype import docspell.ftsclient.{FtsClient, TextData} import docspell.scheduler.JobStore +import docspell.scheduler.usertask.UserTaskScope import docspell.store.Store import docspell.store.queries.QAttachment import docspell.store.records._ @@ -24,21 +24,22 @@ import docspell.store.records._ trait OAttachment[F[_]] { def setExtractedText( - collective: Ident, + collective: CollectiveId, itemId: Ident, attachId: Ident, newText: F[String] ): F[Unit] def addOrReplacePdf( - collective: Ident, + collective: CollectiveId, attachId: Ident, pdfData: Stream[F, Byte], - regeneratePreview: Boolean + regeneratePreview: Boolean, + submitter: UserTaskScope ): F[Unit] def addOrReplacePreview( - collective: Ident, + collective: CollectiveId, attachId: Ident, imageData: Stream[F, Byte] ): F[Unit] @@ -55,7 +56,7 @@ object OAttachment { private[this] val logger = docspell.logging.getLogger[F] def setExtractedText( - collective: Ident, + collective: CollectiveId, itemId: Ident, attachId: Ident, newText: F[String] @@ -104,24 +105,22 @@ object OAttachment { } yield () def addOrReplacePdf( - collective: Ident, + collective: CollectiveId, attachId: Ident, pdfData: Stream[F, Byte], - regeneratePreview: Boolean + regeneratePreview: Boolean, + submitter: UserTaskScope ): F[Unit] = { def generatePreview(ra: RAttachment): F[Unit] = JobFactory - .makePreview(MakePreviewArgs(ra.id, StoreMode.Replace), None) + .makePreview(MakePreviewArgs(ra.id, StoreMode.Replace), submitter) 
.map(_.encode) .flatMap(jobStore.insert) *> logger.info(s"Job submitted to re-generate preview from new pdf") def generatePageCount(ra: RAttachment): F[Unit] = JobFactory - .makePageCount( - MakePageCountArgs(ra.id), - AccountId(collective, DocspellSystem.user).some - ) + .makePageCount(MakePageCountArgs(ra.id), submitter) .map(_.encode) .flatMap(jobStore.insert) *> logger.info(s"Job submitted to find page count from new pdf") @@ -168,7 +167,7 @@ object OAttachment { } def addOrReplacePreview( - collective: Ident, + collective: CollectiveId, attachId: Ident, imageData: Stream[F, Byte] ): F[Unit] = { diff --git a/modules/backend/src/main/scala/docspell/backend/ops/OClientSettings.scala b/modules/backend/src/main/scala/docspell/backend/ops/OClientSettings.scala index 0db6ce2d..f98b56dd 100644 --- a/modules/backend/src/main/scala/docspell/backend/ops/OClientSettings.scala +++ b/modules/backend/src/main/scala/docspell/backend/ops/OClientSettings.scala @@ -11,29 +11,31 @@ import cats.data.OptionT import cats.effect.{Async, Resource} import cats.implicits._ -import docspell.common.AccountId import docspell.common._ import docspell.store.Store import docspell.store.records.RClientSettingsCollective import docspell.store.records.RClientSettingsUser -import docspell.store.records.RUser import io.circe.Json trait OClientSettings[F[_]] { - def deleteUser(clientId: Ident, account: AccountId): F[Boolean] - def saveUser(clientId: Ident, account: AccountId, data: Json): F[Unit] - def loadUser(clientId: Ident, account: AccountId): F[Option[RClientSettingsUser]] + def deleteUser(clientId: Ident, userId: Ident): F[Boolean] + def saveUser(clientId: Ident, userId: Ident, data: Json): F[Unit] + def loadUser(clientId: Ident, userId: Ident): F[Option[RClientSettingsUser]] - def deleteCollective(clientId: Ident, account: AccountId): F[Boolean] - def saveCollective(clientId: Ident, account: AccountId, data: Json): F[Unit] + def deleteCollective(clientId: Ident, collectiveId: CollectiveId): 
F[Boolean] + def saveCollective(clientId: Ident, collectiveId: CollectiveId, data: Json): F[Unit] def loadCollective( clientId: Ident, - account: AccountId + collectiveId: CollectiveId ): F[Option[RClientSettingsCollective]] - def loadMerged(clientId: Ident, account: AccountId): F[Option[Json]] + def loadMerged( + clientId: Ident, + collectiveId: CollectiveId, + userId: Ident + ): F[Option[Json]] } object OClientSettings { @@ -41,22 +43,18 @@ object OClientSettings { Resource.pure[F, OClientSettings[F]](new OClientSettings[F] { val log = docspell.logging.getLogger[F] - private def getUserId(account: AccountId): OptionT[F, Ident] = - OptionT(store.transact(RUser.findByAccount(account))).map(_.uid) - - def deleteCollective(clientId: Ident, account: AccountId): F[Boolean] = + def deleteCollective(clientId: Ident, collectiveId: CollectiveId): F[Boolean] = store - .transact(RClientSettingsCollective.delete(clientId, account.collective)) + .transact(RClientSettingsCollective.delete(clientId, collectiveId)) .map(_ > 0) - def deleteUser(clientId: Ident, account: AccountId): F[Boolean] = + def deleteUser(clientId: Ident, userId: Ident): F[Boolean] = (for { _ <- OptionT.liftF( log.debug( - s"Deleting client settings for client ${clientId.id} and account $account" + s"Deleting client settings for client ${clientId.id} and user ${userId.id}" ) ) - userId <- getUserId(account) n <- OptionT.liftF( store.transact( RClientSettingsUser.delete(clientId, userId) @@ -64,24 +62,27 @@ object OClientSettings { ) } yield n > 0).getOrElse(false) - def saveCollective(clientId: Ident, account: AccountId, data: Json): F[Unit] = + def saveCollective( + clientId: Ident, + collectiveId: CollectiveId, + data: Json + ): F[Unit] = for { n <- store.transact( - RClientSettingsCollective.upsert(clientId, account.collective, data) + RClientSettingsCollective.upsert(clientId, collectiveId, data) ) _ <- if (n <= 0) Async[F].raiseError(new IllegalStateException("No rows updated!")) else ().pure[F] } yield 
() - def saveUser(clientId: Ident, account: AccountId, data: Json): F[Unit] = + def saveUser(clientId: Ident, userId: Ident, data: Json): F[Unit] = (for { _ <- OptionT.liftF( log.debug( - s"Storing client settings for client ${clientId.id} and account $account" + s"Storing client settings for client ${clientId.id} and user ${userId.id}" ) ) - userId <- getUserId(account) n <- OptionT.liftF( store.transact(RClientSettingsUser.upsert(clientId, userId, data)) ) @@ -93,25 +94,24 @@ object OClientSettings { def loadCollective( clientId: Ident, - account: AccountId + collectiveId: CollectiveId ): F[Option[RClientSettingsCollective]] = - store.transact(RClientSettingsCollective.find(clientId, account.collective)) + store.transact(RClientSettingsCollective.find(clientId, collectiveId)) - def loadUser(clientId: Ident, account: AccountId): F[Option[RClientSettingsUser]] = + def loadUser(clientId: Ident, userId: Ident): F[Option[RClientSettingsUser]] = (for { _ <- OptionT.liftF( log.debug( - s"Loading client settings for client ${clientId.id} and account $account" + s"Loading client settings for client ${clientId.id} and user ${userId.id}" ) ) - userId <- getUserId(account) data <- OptionT(store.transact(RClientSettingsUser.find(clientId, userId))) } yield data).value - def loadMerged(clientId: Ident, account: AccountId) = + def loadMerged(clientId: Ident, collectiveId: CollectiveId, userId: Ident) = for { - collData <- loadCollective(clientId, account) - userData <- loadUser(clientId, account) + collData <- loadCollective(clientId, collectiveId) + userData <- loadUser(clientId, userId) mergedData = collData.map(_.settingsData) |+| userData.map(_.settingsData) } yield mergedData diff --git a/modules/backend/src/main/scala/docspell/backend/ops/OCollective.scala b/modules/backend/src/main/scala/docspell/backend/ops/OCollective.scala index dfcf587d..0613cd06 100644 --- a/modules/backend/src/main/scala/docspell/backend/ops/OCollective.scala +++ 
b/modules/backend/src/main/scala/docspell/backend/ops/OCollective.scala @@ -6,10 +6,10 @@ package docspell.backend.ops +import cats.data.OptionT import cats.effect.{Async, Resource} import cats.implicits._ import fs2.Stream - import docspell.backend.JobFactory import docspell.backend.PasswordCrypt import docspell.backend.ops.OCollective._ @@ -20,35 +20,39 @@ import docspell.store.UpdateResult import docspell.store.queries.{QCollective, QUser} import docspell.store.records._ import docspell.store.{AddResult, Store} - import com.github.eikek.calev._ trait OCollective[F[_]] { def find(name: Ident): F[Option[RCollective]] - def updateSettings(collective: Ident, settings: OCollective.Settings): F[AddResult] + def updateSettings( + collective: CollectiveId, + collectiveName: Ident, + settings: OCollective.Settings + ): F[AddResult] - def findSettings(collective: Ident): F[Option[OCollective.Settings]] + def findSettings(collective: CollectiveId): F[Option[OCollective.Settings]] - def listUser(collective: Ident): F[Vector[RUser]] + def listUser(collective: CollectiveId): F[Vector[RUser]] def add(s: RUser): F[AddResult] def update(s: RUser): F[AddResult] /** Deletes the user and all its data. */ - def deleteUser(login: Ident, collective: Ident): F[UpdateResult] + def deleteUser(userId: Ident): F[UpdateResult] /** Return an excerpt of what would be deleted, when the user is deleted. 
*/ - def getDeleteUserData(accountId: AccountId): F[DeleteUserData] + def getDeleteUserData(cid: CollectiveId, userId: Ident): F[DeleteUserData] - def insights(collective: Ident): F[InsightData] + def insights(collective: CollectiveId): F[InsightData] - def tagCloud(collective: Ident): F[List[TagCount]] + def tagCloud(collective: CollectiveId): F[List[TagCount]] def changePassword( - accountId: AccountId, + collectiveId: CollectiveId, + userId: Ident, current: Password, newPass: Password ): F[PassChangeResult] @@ -56,20 +60,21 @@ trait OCollective[F[_]] { def resetPassword(accountId: AccountId): F[PassResetResult] def getContacts( - collective: Ident, + collective: CollectiveId, query: Option[String], kind: Option[ContactKind] ): Stream[F, RContact] def findEnabledSource(sourceId: Ident): F[Option[RSource]] - def addPassword(collective: Ident, pw: Password): F[Unit] + def addPassword(collective: CollectiveId, pw: Password): F[Unit] - def getPasswords(collective: Ident): F[List[RCollectivePassword]] + def getPasswords(collective: CollectiveId): F[List[RCollectivePassword]] + /** Removes a password from the list given the id of `RCollectivePassword` */ def removePassword(id: Ident): F[Unit] - def startLearnClassifier(collective: Ident): F[Unit] + def startLearnClassifier(collective: CollectiveId): F[Unit] def startEmptyTrash(args: EmptyTrashArgs): F[Unit] @@ -78,7 +83,8 @@ trait OCollective[F[_]] { */ def generatePreviews( storeMode: MakePreviewArgs.StoreMode, - account: AccountId + collectiveId: CollectiveId, + submitter: UserTaskScope ): F[UpdateResult] } @@ -137,26 +143,33 @@ object OCollective { ): Resource[F, OCollective[F]] = Resource.pure[F, OCollective[F]](new OCollective[F] { def find(name: Ident): F[Option[RCollective]] = - store.transact(RCollective.findById(name)) + store.transact(RCollective.findByName(name)) - def updateSettings(collective: Ident, sett: Settings): F[AddResult] = + def updateSettings( + collectiveId: CollectiveId, + collectiveName: 
Ident, + sett: Settings + ): F[AddResult] = store - .transact(RCollective.updateSettings(collective, sett)) + .transact(RCollective.updateSettings(collectiveId, sett)) .attempt .map(AddResult.fromUpdate) .flatMap(res => - updateLearnClassifierTask(collective, sett) *> updateEmptyTrashTask( - collective, + updateLearnClassifierTask(collectiveId, sett) *> updateEmptyTrashTask( + collectiveId, sett ) *> res.pure[F] ) - private def updateLearnClassifierTask(coll: Ident, sett: Settings): F[Unit] = + private def updateLearnClassifierTask( + cid: CollectiveId, + sett: Settings + ): F[Unit] = for { id <- Ident.randomId[F] on = sett.classifier.exists(_.enabled) timer = sett.classifier.map(_.schedule).getOrElse(CalEvent.unsafe("")) - args = LearnClassifierArgs(coll) + args = LearnClassifierArgs(cid) ut = UserTask( id, LearnClassifierArgs.taskName, @@ -165,36 +178,41 @@ object OCollective { None, args ) - _ <- uts.updateOneTask(UserTaskScope(coll), args.makeSubject.some, ut) + _ <- uts.updateOneTask(UserTaskScope.collective(cid), args.makeSubject.some, ut) _ <- joex.notifyAllNodes } yield () - private def updateEmptyTrashTask(coll: Ident, sett: Settings): F[Unit] = + private def updateEmptyTrashTask( + cid: CollectiveId, + sett: Settings + ): F[Unit] = for { id <- Ident.randomId[F] settings = sett.emptyTrash.getOrElse(EmptyTrash.default) - args = EmptyTrashArgs(coll, settings.minAge) + args = EmptyTrashArgs(cid, settings.minAge) ut = UserTask(id, EmptyTrashArgs.taskName, true, settings.schedule, None, args) - _ <- uts.updateOneTask(UserTaskScope(coll), args.makeSubject.some, ut) + _ <- uts.updateOneTask(UserTaskScope.collective(cid), args.makeSubject.some, ut) _ <- joex.notifyAllNodes } yield () - def addPassword(collective: Ident, pw: Password): F[Unit] = + def addPassword(collective: CollectiveId, pw: Password): F[Unit] = for { cpass <- RCollectivePassword.createNew[F](collective, pw) _ <- store.transact(RCollectivePassword.upsert(cpass)) } yield () - def 
getPasswords(collective: Ident): F[List[RCollectivePassword]] = + def getPasswords(collective: CollectiveId): F[List[RCollectivePassword]] = store.transact(RCollectivePassword.findAll(collective)) def removePassword(id: Ident): F[Unit] = store.transact(RCollectivePassword.deleteById(id)).map(_ => ()) - def startLearnClassifier(collective: Ident): F[Unit] = + def startLearnClassifier( + collectiveId: CollectiveId + ): F[Unit] = for { id <- Ident.randomId[F] - args = LearnClassifierArgs(collective) + args = LearnClassifierArgs(collectiveId) ut = UserTask( id, LearnClassifierArgs.taskName, @@ -204,7 +222,11 @@ object OCollective { args ) _ <- uts - .executeNow(UserTaskScope(collective), args.makeSubject.some, ut) + .executeNow( + UserTaskScope.collective(args.collectiveId), + args.makeSubject.some, + ut + ) } yield () def startEmptyTrash(args: EmptyTrashArgs): F[Unit] = @@ -219,13 +241,17 @@ object OCollective { args ) _ <- uts - .executeNow(UserTaskScope(args.collective), args.makeSubject.some, ut) + .executeNow( + UserTaskScope.collective(args.collective), + args.makeSubject.some, + ut + ) } yield () - def findSettings(collective: Ident): F[Option[OCollective.Settings]] = + def findSettings(collective: CollectiveId): F[Option[OCollective.Settings]] = store.transact(RCollective.getSettings(collective)) - def listUser(collective: Ident): F[Vector[RUser]] = + def listUser(collective: CollectiveId): F[Vector[RUser]] = store.transact(RUser.findAll(collective, _.login)) def add(s: RUser): F[AddResult] = @@ -240,47 +266,48 @@ object OCollective { def update(s: RUser): F[AddResult] = store.add(RUser.update(s), RUser.exists(s.login)) - def getDeleteUserData(accountId: AccountId): F[DeleteUserData] = - store.transact(QUser.getUserData(accountId)) + def getDeleteUserData(cid: CollectiveId, userId: Ident): F[DeleteUserData] = + store.transact(QUser.getUserData(cid, userId)) - def deleteUser(login: Ident, collective: Ident): F[UpdateResult] = + def deleteUser(userId: Ident): 
F[UpdateResult] = UpdateResult.fromUpdate( - store.transact(QUser.deleteUserAndData(AccountId(collective, login))) + store.transact(QUser.deleteUserAndData(userId)) ) - def insights(collective: Ident): F[InsightData] = + def insights(collective: CollectiveId): F[InsightData] = store.transact(QCollective.getInsights(collective)) - def tagCloud(collective: Ident): F[List[TagCount]] = + def tagCloud(collective: CollectiveId): F[List[TagCount]] = store.transact(QCollective.tagCloud(collective)) def resetPassword(accountId: AccountId): F[PassResetResult] = - for { - newPass <- Password.generate[F] - optUser <- store.transact(RUser.findByAccount(accountId)) - n <- store.transact( - RUser.updatePassword(accountId, PasswordCrypt.crypt(newPass)) + (for { + user <- OptionT(store.transact(RUser.findByAccount(accountId))) + newPass <- OptionT.liftF(Password.generate[F]) + + doUpdate = store.transact( + RUser.updatePassword(user.cid, user.uid, PasswordCrypt.crypt(newPass)) ) - res = - if (optUser.exists(_.source != AccountSource.Local)) - PassResetResult.userNotLocal - else if (n <= 0) PassResetResult.notFound - else PassResetResult.success(newPass) - } yield res + res <- + if (user.source != AccountSource.Local) + OptionT.pure[F](PassResetResult.userNotLocal) + else OptionT.liftF(doUpdate.as(PassResetResult.success(newPass))) + } yield res).getOrElse(PassResetResult.notFound) def changePassword( - accountId: AccountId, + collectiveId: CollectiveId, + userId: Ident, current: Password, newPass: Password ): F[PassChangeResult] = { val q = for { - optUser <- RUser.findByAccount(accountId) + optUser <- RUser.findById(userId, collectiveId.some) check = optUser.map(_.password).map(p => PasswordCrypt.check(current, p)) n <- check .filter(identity) .traverse(_ => - RUser.updatePassword(accountId, PasswordCrypt.crypt(newPass)) + RUser.updatePassword(collectiveId, userId, PasswordCrypt.crypt(newPass)) ) res = check match { case Some(true) => @@ -299,7 +326,7 @@ object OCollective { } def 
getContacts( - collective: Ident, + collective: CollectiveId, query: Option[String], kind: Option[ContactKind] ): Stream[F, RContact] = @@ -310,12 +337,13 @@ object OCollective { def generatePreviews( storeMode: MakePreviewArgs.StoreMode, - account: AccountId + collectiveId: CollectiveId, + submitter: UserTaskScope ): F[UpdateResult] = for { job <- JobFactory.allPreviews[F]( - AllPreviewsArgs(Some(account.collective), storeMode), - Some(account.user) + AllPreviewsArgs(Some(collectiveId), storeMode), + submitter ) _ <- jobStore.insertIfNew(job.encode) } yield UpdateResult.success diff --git a/modules/backend/src/main/scala/docspell/backend/ops/OCustomFields.scala b/modules/backend/src/main/scala/docspell/backend/ops/OCustomFields.scala index a5416048..99a48143 100644 --- a/modules/backend/src/main/scala/docspell/backend/ops/OCustomFields.scala +++ b/modules/backend/src/main/scala/docspell/backend/ops/OCustomFields.scala @@ -36,13 +36,13 @@ trait OCustomFields[F[_]] { /** Find all fields using an optional query on the name and label */ def findAll( - coll: Ident, + coll: CollectiveId, nameQuery: Option[String], order: CustomFieldOrder ): F[Vector[CustomFieldData]] /** Find one field by its id */ - def findById(coll: Ident, fieldId: Ident): F[Option[CustomFieldData]] + def findById(coll: CollectiveId, fieldId: Ident): F[Option[CustomFieldData]] /** Create a new non-existing field. */ def create(field: NewCustomField): F[AddResult] @@ -51,7 +51,7 @@ trait OCustomFields[F[_]] { def change(field: RCustomField): F[UpdateResult] /** Deletes the field by name or id. */ - def delete(coll: Ident, fieldIdOrName: Ident): F[UpdateResult] + def delete(coll: CollectiveId, fieldIdOrName: Ident): F[UpdateResult] /** Sets a value given a field an an item. Existing values are overwritten. 
*/ def setValue(item: Ident, value: SetValue): F[AttachedEvent[SetValueResult]] @@ -80,13 +80,13 @@ object OCustomFields { name: Ident, label: Option[String], ftype: CustomFieldType, - cid: Ident + cid: CollectiveId ) case class SetValue( field: Ident, value: String, - collective: Ident + collective: CollectiveId ) sealed trait SetValueResult @@ -106,7 +106,7 @@ object OCustomFields { case class RemoveValue( field: Ident, item: Nel[Ident], - collective: Ident + collective: CollectiveId ) sealed trait CustomFieldOrder @@ -158,7 +158,7 @@ object OCustomFields { store.transact(QCustomField.findAllValues(itemIds)) def findAll( - coll: Ident, + coll: CollectiveId, nameQuery: Option[String], order: CustomFieldOrder ): F[Vector[CustomFieldData]] = @@ -170,7 +170,7 @@ object OCustomFields { ) ) - def findById(coll: Ident, field: Ident): F[Option[CustomFieldData]] = + def findById(coll: CollectiveId, field: Ident): F[Option[CustomFieldData]] = store.transact(QCustomField.findById(field, coll)) def create(field: NewCustomField): F[AddResult] = { @@ -188,7 +188,7 @@ object OCustomFields { def change(field: RCustomField): F[UpdateResult] = UpdateResult.fromUpdate(store.transact(RCustomField.update(field))) - def delete(coll: Ident, fieldIdOrName: Ident): F[UpdateResult] = { + def delete(coll: CollectiveId, fieldIdOrName: Ident): F[UpdateResult] = { val update = for { field <- OptionT(RCustomField.findByIdOrName(fieldIdOrName, coll)) diff --git a/modules/backend/src/main/scala/docspell/backend/ops/ODownloadAll.scala b/modules/backend/src/main/scala/docspell/backend/ops/ODownloadAll.scala index ffb33e6d..6cec4b11 100644 --- a/modules/backend/src/main/scala/docspell/backend/ops/ODownloadAll.scala +++ b/modules/backend/src/main/scala/docspell/backend/ops/ODownloadAll.scala @@ -7,12 +7,10 @@ package docspell.backend.ops import java.security.MessageDigest - import cats.data.OptionT import cats.effect._ import cats.syntax.all._ import fs2.{Pipe, Stream} - import 
docspell.backend.JobFactory import docspell.backend.ops.ODownloadAll.model._ import docspell.backend.ops.OJob.JobCancelResult @@ -21,11 +19,11 @@ import docspell.common._ import docspell.query.ItemQuery.Expr.ValidItemStates import docspell.query.{ItemQuery, ItemQueryParser} import docspell.scheduler.JobStore +import docspell.scheduler.usertask.UserTaskScope import docspell.store.Store import docspell.store.file.FileMetadata import docspell.store.queries.{QItem, Query} import docspell.store.records.{RDownloadQuery, RFileMeta, RJob} - import io.circe.generic.semiauto.{deriveDecoder, deriveEncoder} import io.circe.{Decoder, Encoder} import scodec.bits.ByteVector @@ -34,12 +32,12 @@ trait ODownloadAll[F[_]] { /** Calculates what kind of zip file would be created and checks the server thresholds. */ - def getSummary(account: AccountId, req: DownloadRequest): F[DownloadSummary] + def getSummary(account: AccountInfo, req: DownloadRequest): F[DownloadSummary] /** Same as `getSummary` but also submits the job to really create the zip file if * allowed and necessary. */ - def submit(accountId: AccountId, req: DownloadRequest): F[DownloadSummary] + def submit(accountId: AccountInfo, req: DownloadRequest): F[DownloadSummary] /** Given the id from the summary, cancels a running job. 
*/ def cancelDownload(accountId: AccountId, id: Ident): F[OJob.JobCancelResult] @@ -65,7 +63,7 @@ object ODownloadAll { private[this] val logger = docspell.logging.getLogger[F] def getSummary( - account: AccountId, + account: AccountInfo, req: DownloadRequest ): F[DownloadSummary] = { val query = req.toQuery(account) @@ -83,16 +81,16 @@ object ODownloadAll { } def submit( - accountId: AccountId, + account: AccountInfo, req: DownloadRequest ): F[DownloadSummary] = for { _ <- logger.info(s"Download all request: $req") - summary <- getSummary(accountId, req) - args = DownloadZipArgs(accountId, req) + summary <- getSummary(account, req) + args = DownloadZipArgs(account.asAccountId, req) _ <- OptionT .whenF(summary.state == DownloadState.NotPresent) { JobFactory - .downloadZip(args, summary.id, accountId) + .downloadZip(args, summary.id, UserTaskScope(account)) .flatMap(job => logger.info(s"Submitting download all job: $job") *> jobStore .insertIfNew(job.encode) @@ -173,9 +171,9 @@ object ODownloadAll { maxFiles: Int, maxSize: ByteSize ) { - def toQuery(accountId: AccountId): Query = + def toQuery(account: AccountInfo): Query = Query - .all(accountId) + .all(account) .withFix(_.andQuery(ValidItemStates)) .withCond(_ => Query.QueryExpr(query.expr)) diff --git a/modules/backend/src/main/scala/docspell/backend/ops/OEquipment.scala b/modules/backend/src/main/scala/docspell/backend/ops/OEquipment.scala index da0586f5..8e44a351 100644 --- a/modules/backend/src/main/scala/docspell/backend/ops/OEquipment.scala +++ b/modules/backend/src/main/scala/docspell/backend/ops/OEquipment.scala @@ -9,26 +9,25 @@ package docspell.backend.ops import cats.data.NonEmptyList import cats.effect.{Async, Resource} import cats.implicits._ - -import docspell.common.{AccountId, Ident} +import docspell.common._ import docspell.store.records.{REquipment, RItem} import docspell.store.{AddResult, Store} trait OEquipment[F[_]] { def findAll( - account: AccountId, + collectiveId: CollectiveId, nameQuery: 
Option[String], order: OEquipment.EquipmentOrder ): F[Vector[REquipment]] - def find(account: AccountId, id: Ident): F[Option[REquipment]] + def find(collectiveId: CollectiveId, id: Ident): F[Option[REquipment]] def add(s: REquipment): F[AddResult] def update(s: REquipment): F[AddResult] - def delete(id: Ident, collective: Ident): F[AddResult] + def delete(id: Ident, collective: CollectiveId): F[AddResult] } object OEquipment { @@ -58,16 +57,16 @@ object OEquipment { def apply[F[_]: Async](store: Store[F]): Resource[F, OEquipment[F]] = Resource.pure[F, OEquipment[F]](new OEquipment[F] { def findAll( - account: AccountId, + collective: CollectiveId, nameQuery: Option[String], order: EquipmentOrder ): F[Vector[REquipment]] = store.transact( - REquipment.findAll(account.collective, nameQuery, EquipmentOrder(order)) + REquipment.findAll(collective, nameQuery, EquipmentOrder(order)) ) - def find(account: AccountId, id: Ident): F[Option[REquipment]] = - store.transact(REquipment.findById(id)).map(_.filter(_.cid == account.collective)) + def find(cid: CollectiveId, id: Ident): F[Option[REquipment]] = + store.transact(REquipment.findById(id)).map(_.filter(_.cid == cid)) def add(e: REquipment): F[AddResult] = { def insert = REquipment.insert(e) @@ -85,7 +84,7 @@ object OEquipment { store.add(insert, exists).map(_.fold(identity, _.withMsg(msg), identity)) } - def delete(id: Ident, collective: Ident): F[AddResult] = { + def delete(id: Ident, collective: CollectiveId): F[AddResult] = { val io = for { n0 <- RItem.removeConcEquip(collective, id) n1 <- REquipment.delete(id, collective) diff --git a/modules/backend/src/main/scala/docspell/backend/ops/OFolder.scala b/modules/backend/src/main/scala/docspell/backend/ops/OFolder.scala index cec3f431..ea8a95a5 100644 --- a/modules/backend/src/main/scala/docspell/backend/ops/OFolder.scala +++ b/modules/backend/src/main/scala/docspell/backend/ops/OFolder.scala @@ -7,8 +7,8 @@ package docspell.backend.ops import cats.data.{NonEmptyList => 
Nel} +import cats.syntax.option._ import cats.effect._ - import docspell.common._ import docspell.store.queries.QFolder import docspell.store.records.{RFolder, RUser} @@ -17,38 +17,43 @@ import docspell.store.{AddResult, Store} trait OFolder[F[_]] { def findAll( - account: AccountId, + collectiveId: CollectiveId, + userId: Ident, ownerLogin: Option[Ident], query: Option[String], order: OFolder.FolderOrder ): F[Vector[OFolder.FolderItem]] - def findById(id: Ident, account: AccountId): F[Option[OFolder.FolderDetail]] + def findById( + id: Ident, + collectiveId: CollectiveId, + userId: Ident + ): F[Option[OFolder.FolderDetail]] /** Adds a new folder. If `login` is non-empty, the `folder.user` property is ignored * and the user-id is determined by the given login name. */ - def add(folder: RFolder, login: Option[Ident]): F[AddResult] + def add(folder: RFolder, userId: Option[Ident]): F[AddResult] def changeName( folder: Ident, - account: AccountId, + userId: Ident, name: String ): F[OFolder.FolderChangeResult] def addMember( folder: Ident, - account: AccountId, + userId: Ident, member: Ident ): F[OFolder.FolderChangeResult] def removeMember( folder: Ident, - account: AccountId, + userId: Ident, member: Ident ): F[OFolder.FolderChangeResult] - def delete(id: Ident, account: AccountId): F[OFolder.FolderChangeResult] + def delete(id: Ident, userId: Ident): F[OFolder.FolderChangeResult] } object OFolder { @@ -94,23 +99,35 @@ object OFolder { def apply[F[_]](store: Store[F]): Resource[F, OFolder[F]] = Resource.pure[F, OFolder[F]](new OFolder[F] { def findAll( - account: AccountId, + collectiveId: CollectiveId, + userId: Ident, ownerLogin: Option[Ident], query: Option[String], order: FolderOrder ): F[Vector[FolderItem]] = store.transact( - QFolder.findAll(account, None, ownerLogin, query, FolderOrder(order)) + QFolder.findAll( + collectiveId, + userId, + None, + ownerLogin, + query, + FolderOrder(order) + ) ) - def findById(id: Ident, account: AccountId): 
F[Option[FolderDetail]] = - store.transact(QFolder.findById(id, account)) + def findById( + id: Ident, + collectiveId: CollectiveId, + userId: Ident + ): F[Option[FolderDetail]] = + store.transact(QFolder.findById(id, collectiveId, userId)) - def add(folder: RFolder, login: Option[Ident]): F[AddResult] = { - val insert = login match { - case Some(n) => + def add(folder: RFolder, userId: Option[Ident]): F[AddResult] = { + val insert = userId match { + case Some(uid) => for { - user <- RUser.findByAccount(AccountId(folder.collectiveId, n)) + user <- RUser.findById(uid, folder.collectiveId.some) s = user.map(u => folder.copy(owner = u.uid)).getOrElse(folder) n <- RFolder.insert(s) } yield n @@ -124,26 +141,26 @@ object OFolder { def changeName( folder: Ident, - account: AccountId, + userId: Ident, name: String ): F[FolderChangeResult] = - store.transact(QFolder.changeName(folder, account, name)) + store.transact(QFolder.changeName(folder, userId, name)) def addMember( folder: Ident, - account: AccountId, + userId: Ident, member: Ident ): F[FolderChangeResult] = - store.transact(QFolder.addMember(folder, account, member)) + store.transact(QFolder.addMember(folder, userId, member)) def removeMember( folder: Ident, - account: AccountId, + userId: Ident, member: Ident ): F[FolderChangeResult] = - store.transact(QFolder.removeMember(folder, account, member)) + store.transact(QFolder.removeMember(folder, userId, member)) - def delete(id: Ident, account: AccountId): F[FolderChangeResult] = - store.transact(QFolder.delete(id, account)) + def delete(id: Ident, userId: Ident): F[FolderChangeResult] = + store.transact(QFolder.delete(id, userId)) }) } diff --git a/modules/backend/src/main/scala/docspell/backend/ops/OFulltext.scala b/modules/backend/src/main/scala/docspell/backend/ops/OFulltext.scala index 89b3fa82..553b9272 100644 --- a/modules/backend/src/main/scala/docspell/backend/ops/OFulltext.scala +++ b/modules/backend/src/main/scala/docspell/backend/ops/OFulltext.scala @@ 
-23,7 +23,7 @@ trait OFulltext[F[_]] { /** Clears the full-text index for the given collective and starts a task indexing all * their data. */ - def reindexCollective(account: AccountId): F[Unit] + def reindexCollective(cid: CollectiveId, submitterUserId: Option[Ident]): F[Unit] } object OFulltext { @@ -40,13 +40,13 @@ object OFulltext { _ <- jobStore.insertIfNew(job.encode) } yield () - def reindexCollective(account: AccountId): F[Unit] = + def reindexCollective(cid: CollectiveId, submitterUserId: Option[Ident]): F[Unit] = for { - _ <- logger.debug(s"Re-index collective: $account") + _ <- logger.debug(s"Re-index collective: $cid") exist <- store.transact( RJob.findNonFinalByTracker(DocspellSystem.migrationTaskTracker) ) - job <- JobFactory.reIndex(account) + job <- JobFactory.reIndex(cid, submitterUserId) _ <- if (exist.isDefined) ().pure[F] else jobStore.insertIfNew(job.encode) diff --git a/modules/backend/src/main/scala/docspell/backend/ops/OItem.scala b/modules/backend/src/main/scala/docspell/backend/ops/OItem.scala index 79b799c0..7462bc18 100644 --- a/modules/backend/src/main/scala/docspell/backend/ops/OItem.scala +++ b/modules/backend/src/main/scala/docspell/backend/ops/OItem.scala @@ -9,7 +9,6 @@ package docspell.backend.ops import cats.data.{NonEmptyList => Nel, OptionT} import cats.effect.{Async, Resource} import cats.implicits._ - import docspell.backend.AttachedEvent import docspell.backend.JobFactory import docspell.backend.fulltext.CreateIndex @@ -19,10 +18,10 @@ import docspell.ftsclient.FtsClient import docspell.logging.Logger import docspell.notification.api.Event import docspell.scheduler.JobStore +import docspell.scheduler.usertask.UserTaskScope import docspell.store.queries.{QAttachment, QItem, QMoveAttachment} import docspell.store.records._ import docspell.store.{AddResult, Store, UpdateResult} - import doobie.implicits._ trait OItem[F[_]] { @@ -31,7 +30,7 @@ trait OItem[F[_]] { def setTags( item: Ident, tagIds: List[String], - collective: 
Ident + collective: CollectiveId ): F[AttachedEvent[UpdateResult]] /** Sets tags for multiple items. The tags of the items will be replaced with the given @@ -40,11 +39,15 @@ trait OItem[F[_]] { def setTagsMultipleItems( items: Nel[Ident], tags: List[String], - collective: Ident + collective: CollectiveId ): F[AttachedEvent[UpdateResult]] /** Create a new tag and add it to the item. */ - def addNewTag(collective: Ident, item: Ident, tag: RTag): F[AttachedEvent[AddResult]] + def addNewTag( + collective: CollectiveId, + item: Ident, + tag: RTag + ): F[AttachedEvent[AddResult]] /** Apply all tags to the given item. Tags must exist, but can be IDs or names. Existing * tags on the item are left unchanged. @@ -52,54 +55,58 @@ trait OItem[F[_]] { def linkTags( item: Ident, tags: List[String], - collective: Ident + collective: CollectiveId ): F[AttachedEvent[UpdateResult]] def linkTagsMultipleItems( items: Nel[Ident], tags: List[String], - collective: Ident + collective: CollectiveId ): F[AttachedEvent[UpdateResult]] def removeTagsOfCategories( item: Ident, - collective: Ident, + collective: CollectiveId, categories: Set[String] ): F[AttachedEvent[UpdateResult]] def removeTagsMultipleItems( items: Nel[Ident], tags: List[String], - collective: Ident + collective: CollectiveId ): F[AttachedEvent[UpdateResult]] /** Toggles tags of the given item. Tags must exist, but can be IDs or names. */ def toggleTags( item: Ident, tags: List[String], - collective: Ident + collective: CollectiveId ): F[AttachedEvent[UpdateResult]] def setDirection( item: Nel[Ident], direction: Direction, - collective: Ident + collective: CollectiveId ): F[UpdateResult] /** Set or remove the folder on an item. Folder can be the id or name. */ - def setFolder(item: Ident, folder: Option[String], collective: Ident): F[UpdateResult] + def setFolder( + item: Ident, + folder: Option[String], + collective: CollectiveId + ): F[UpdateResult] /** Set or remove the folder on multiple items. 
Folder can be the id or name. */ def setFolderMultiple( items: Nel[Ident], folder: Option[String], - collective: Ident + collective: CollectiveId ): F[UpdateResult] def setCorrOrg( items: Nel[Ident], org: Option[Ident], - collective: Ident + collective: CollectiveId ): F[UpdateResult] def addCorrOrg(item: Ident, org: OOrganization.OrgAndContacts): F[AddResult] @@ -107,7 +114,7 @@ trait OItem[F[_]] { def setCorrPerson( items: Nel[Ident], person: Option[Ident], - collective: Ident + collective: CollectiveId ): F[UpdateResult] def addCorrPerson(item: Ident, person: OOrganization.PersonAndContacts): F[AddResult] @@ -115,7 +122,7 @@ trait OItem[F[_]] { def setConcPerson( items: Nel[Ident], person: Option[Ident], - collective: Ident + collective: CollectiveId ): F[UpdateResult] def addConcPerson(item: Ident, person: OOrganization.PersonAndContacts): F[AddResult] @@ -123,64 +130,68 @@ trait OItem[F[_]] { def setConcEquip( items: Nel[Ident], equip: Option[Ident], - collective: Ident + collective: CollectiveId ): F[UpdateResult] def addConcEquip(item: Ident, equip: REquipment): F[AddResult] - def setNotes(item: Ident, notes: Option[String], collective: Ident): F[UpdateResult] + def setNotes( + item: Ident, + notes: Option[String], + collective: CollectiveId + ): F[UpdateResult] def addNotes( item: Ident, notes: String, separator: Option[String], - collective: Ident + collective: CollectiveId ): F[UpdateResult] - def setName(item: Ident, name: String, collective: Ident): F[UpdateResult] + def setName(item: Ident, name: String, collective: CollectiveId): F[UpdateResult] def setNameMultiple( items: Nel[Ident], name: String, - collective: Ident + collective: CollectiveId ): F[UpdateResult] - def setState(item: Ident, state: ItemState, collective: Ident): F[AddResult] = + def setState(item: Ident, state: ItemState, collective: CollectiveId): F[AddResult] = setStates(Nel.of(item), state, collective) def setStates( item: Nel[Ident], state: ItemState, - collective: Ident + 
collective: CollectiveId ): F[AddResult] - def restore(items: Nel[Ident], collective: Ident): F[UpdateResult] + def restore(items: Nel[Ident], collective: CollectiveId): F[UpdateResult] def setItemDate( item: Nel[Ident], date: Option[Timestamp], - collective: Ident + collective: CollectiveId ): F[UpdateResult] def setItemDueDate( item: Nel[Ident], date: Option[Timestamp], - collective: Ident + collective: CollectiveId ): F[UpdateResult] - def getProposals(item: Ident, collective: Ident): F[MetaProposalList] + def getProposals(item: Ident, collective: CollectiveId): F[MetaProposalList] - def deleteItem(itemId: Ident, collective: Ident): F[Int] + def deleteItem(itemId: Ident, collective: CollectiveId): F[Int] - def deleteItemMultiple(items: Nel[Ident], collective: Ident): F[Int] + def deleteItemMultiple(items: Nel[Ident], collective: CollectiveId): F[Int] - def deleteAttachment(id: Ident, collective: Ident): F[Int] + def deleteAttachment(id: Ident, collective: CollectiveId): F[Int] - def setDeletedState(items: Nel[Ident], collective: Ident): F[Int] + def setDeletedState(items: Nel[Ident], collective: CollectiveId): F[Int] def deleteAttachmentMultiple( attachments: Nel[Ident], - collective: Ident + collective: CollectiveId ): F[Int] def moveAttachmentBefore(itemId: Ident, source: Ident, target: Ident): F[AddResult] @@ -188,7 +199,7 @@ trait OItem[F[_]] { def setAttachmentName( attachId: Ident, name: Option[String], - collective: Ident + collective: CollectiveId ): F[UpdateResult] /** Submits the item for re-processing. The list of attachment ids can be used to only @@ -196,28 +207,30 @@ trait OItem[F[_]] { * attachments are reprocessed. This call only submits the job into the queue. 
*/ def reprocess( + cid: CollectiveId, item: Ident, attachments: List[Ident], - account: AccountId + submitter: UserTaskScope ): F[UpdateResult] def reprocessAll( + cid: CollectiveId, items: Nel[Ident], - account: AccountId + submitter: UserTaskScope ): F[UpdateResult] /** Submits a task that finds all non-converted pdfs and triggers converting them using * ocrmypdf. Each file is converted by a separate task. */ def convertAllPdf( - collective: Option[Ident], - submitter: Option[Ident] + collective: Option[CollectiveId], + submitter: UserTaskScope ): F[UpdateResult] /** Submits a task that (re)generates the preview image for an attachment. */ def generatePreview( args: MakePreviewArgs, - account: AccountId + account: UserTaskScope ): F[UpdateResult] /** Submits a task that (re)generates the preview images for all attachments. */ @@ -227,7 +240,7 @@ trait OItem[F[_]] { def merge( logger: Logger[F], items: Nel[Ident], - collective: Ident + collective: CollectiveId ): F[UpdateResult] } @@ -248,7 +261,7 @@ object OItem { def merge( logger: Logger[F], items: Nel[Ident], - collective: Ident + collective: CollectiveId ): F[UpdateResult] = Merge(logger, store, this, createIndex).merge(items, collective).attempt.map { case Right(Right(_)) => UpdateResult.success @@ -269,14 +282,14 @@ object OItem { def linkTags( item: Ident, tags: List[String], - collective: Ident + collective: CollectiveId ): F[AttachedEvent[UpdateResult]] = linkTagsMultipleItems(Nel.of(item), tags, collective) def linkTagsMultipleItems( items: Nel[Ident], tags: List[String], - collective: Ident + collective: CollectiveId ): F[AttachedEvent[UpdateResult]] = tags.distinct match { case Nil => AttachedEvent.only(UpdateResult.success).pure[F] @@ -305,7 +318,7 @@ object OItem { def removeTagsOfCategories( item: Ident, - collective: Ident, + collective: CollectiveId, categories: Set[String] ): F[AttachedEvent[UpdateResult]] = if (categories.isEmpty) { @@ -328,7 +341,7 @@ object OItem { def 
removeTagsMultipleItems( items: Nel[Ident], tags: List[String], - collective: Ident + collective: CollectiveId ): F[AttachedEvent[UpdateResult]] = tags.distinct match { case Nil => AttachedEvent.only(UpdateResult.success).pure[F] @@ -354,7 +367,7 @@ object OItem { def toggleTags( item: Ident, tags: List[String], - collective: Ident + collective: CollectiveId ): F[AttachedEvent[UpdateResult]] = tags.distinct match { case Nil => AttachedEvent.only(UpdateResult.success).pure[F] @@ -383,14 +396,14 @@ object OItem { def setTags( item: Ident, tagIds: List[String], - collective: Ident + collective: CollectiveId ): F[AttachedEvent[UpdateResult]] = setTagsMultipleItems(Nel.of(item), tagIds, collective) def setTagsMultipleItems( items: Nel[Ident], tags: List[String], - collective: Ident + collective: CollectiveId ): F[AttachedEvent[UpdateResult]] = { val dbTask = for { @@ -411,7 +424,7 @@ object OItem { } def addNewTag( - collective: Ident, + collective: CollectiveId, item: Ident, tag: RTag ): F[AttachedEvent[AddResult]] = @@ -448,7 +461,7 @@ object OItem { def setDirection( items: Nel[Ident], direction: Direction, - collective: Ident + collective: CollectiveId ): F[UpdateResult] = UpdateResult.fromUpdate( store @@ -458,7 +471,7 @@ object OItem { def setFolder( item: Ident, folder: Option[String], - collective: Ident + collective: CollectiveId ): F[UpdateResult] = for { result <- store.transact(RItem.updateFolder(item, collective, folder)).attempt @@ -478,7 +491,7 @@ object OItem { def setFolderMultiple( items: Nel[Ident], folder: Option[String], - collective: Ident + collective: CollectiveId ): F[UpdateResult] = for { results <- items.traverse(i => setFolder(i, folder, collective)) @@ -499,7 +512,7 @@ object OItem { def setCorrOrg( items: Nel[Ident], org: Option[Ident], - collective: Ident + collective: CollectiveId ): F[UpdateResult] = UpdateResult.fromUpdate( store @@ -533,7 +546,7 @@ object OItem { def setCorrPerson( items: Nel[Ident], person: Option[Ident], - 
collective: Ident + collective: CollectiveId ): F[UpdateResult] = UpdateResult.fromUpdate( store @@ -571,7 +584,7 @@ object OItem { def setConcPerson( items: Nel[Ident], person: Option[Ident], - collective: Ident + collective: CollectiveId ): F[UpdateResult] = UpdateResult.fromUpdate( store @@ -609,7 +622,7 @@ object OItem { def setConcEquip( items: Nel[Ident], equip: Option[Ident], - collective: Ident + collective: CollectiveId ): F[UpdateResult] = UpdateResult.fromUpdate( store @@ -640,7 +653,7 @@ object OItem { def setNotes( item: Ident, notes: Option[String], - collective: Ident + collective: CollectiveId ): F[UpdateResult] = UpdateResult .fromUpdate( @@ -662,7 +675,7 @@ object OItem { item: Ident, notes: String, separator: Option[String], - collective: Ident + collective: CollectiveId ): F[UpdateResult] = store .transact(RItem.appendNotes(item, collective, notes, separator)) @@ -685,7 +698,11 @@ object OItem { UpdateResult.notFound.pure[F] } - def setName(item: Ident, name: String, collective: Ident): F[UpdateResult] = + def setName( + item: Ident, + name: String, + collective: CollectiveId + ): F[UpdateResult] = UpdateResult .fromUpdate( store @@ -705,7 +722,7 @@ object OItem { def setNameMultiple( items: Nel[Ident], name: String, - collective: Ident + collective: CollectiveId ): F[UpdateResult] = for { results <- items.traverse(i => setName(i, name, collective)) @@ -726,7 +743,7 @@ object OItem { def setStates( items: Nel[Ident], state: ItemState, - collective: Ident + collective: CollectiveId ): F[AddResult] = store .transact(RItem.updateStateForCollective(items, state, collective)) @@ -735,7 +752,7 @@ object OItem { def restore( items: Nel[Ident], - collective: Ident + collective: CollectiveId ): F[UpdateResult] = UpdateResult.fromUpdate(for { n <- store @@ -748,7 +765,7 @@ object OItem { def setItemDate( items: Nel[Ident], date: Option[Timestamp], - collective: Ident + collective: CollectiveId ): F[UpdateResult] = UpdateResult.fromUpdate( store @@ -758,42 
+775,42 @@ object OItem { def setItemDueDate( items: Nel[Ident], date: Option[Timestamp], - collective: Ident + collective: CollectiveId ): F[UpdateResult] = UpdateResult.fromUpdate( store .transact(RItem.updateDueDate(items, collective, date)) ) - def deleteItem(itemId: Ident, collective: Ident): F[Int] = + def deleteItem(itemId: Ident, collective: CollectiveId): F[Int] = QItem .delete(store)(itemId, collective) .flatTap(_ => fts.removeItem(logger, itemId)) - def deleteItemMultiple(items: Nel[Ident], collective: Ident): F[Int] = + def deleteItemMultiple(items: Nel[Ident], collective: CollectiveId): F[Int] = for { itemIds <- store.transact(RItem.filterItems(items, collective)) results <- itemIds.traverse(item => deleteItem(item, collective)) n = results.sum } yield n - def setDeletedState(items: Nel[Ident], collective: Ident): F[Int] = + def setDeletedState(items: Nel[Ident], collective: CollectiveId): F[Int] = for { n <- store.transact(RItem.setState(items, collective, ItemState.Deleted)) _ <- items.traverse(id => fts.removeItem(logger, id)) } yield n - def getProposals(item: Ident, collective: Ident): F[MetaProposalList] = + def getProposals(item: Ident, collective: CollectiveId): F[MetaProposalList] = store.transact(QAttachment.getMetaProposals(item, collective)) - def deleteAttachment(id: Ident, collective: Ident): F[Int] = + def deleteAttachment(id: Ident, collective: CollectiveId): F[Int] = QAttachment .deleteSingleAttachment(store)(id, collective) .flatTap(_ => fts.removeAttachment(logger, id)) def deleteAttachmentMultiple( attachments: Nel[Ident], - collective: Ident + collective: CollectiveId ): F[Int] = for { attachmentIds <- store.transact( @@ -808,7 +825,7 @@ object OItem { def setAttachmentName( attachId: Ident, name: Option[String], - collective: Ident + collective: CollectiveId ): F[UpdateResult] = UpdateResult .fromUpdate( @@ -833,49 +850,52 @@ object OItem { ) def reprocess( + cid: CollectiveId, item: Ident, attachments: List[Ident], - account: 
AccountId + submitter: UserTaskScope ): F[UpdateResult] = (for { _ <- OptionT( - store.transact(RItem.findByIdAndCollective(item, account.collective)) + store.transact(RItem.findByIdAndCollective(item, cid)) ) args = ReProcessItemArgs(item, attachments) job <- OptionT.liftF( - JobFactory.reprocessItem[F](args, account, Priority.Low) + JobFactory.reprocessItem[F](args, submitter, Priority.Low) ) _ <- OptionT.liftF(jobStore.insertIfNew(job.encode)) } yield UpdateResult.success).getOrElse(UpdateResult.notFound) def reprocessAll( + cid: CollectiveId, items: Nel[Ident], - account: AccountId + submitter: UserTaskScope ): F[UpdateResult] = UpdateResult.fromUpdate(for { - items <- store.transact(RItem.filterItems(items, account.collective)) + items <- store.transact(RItem.filterItems(items, cid)) jobs <- items .map(item => ReProcessItemArgs(item, Nil)) - .traverse(arg => JobFactory.reprocessItem[F](arg, account, Priority.Low)) + .traverse(arg => JobFactory.reprocessItem[F](arg, submitter, Priority.Low)) .map(_.map(_.encode)) _ <- jobStore.insertAllIfNew(jobs) } yield items.size) def convertAllPdf( - collective: Option[Ident], - submitter: Option[Ident] + collective: Option[CollectiveId], + submitter: UserTaskScope ): F[UpdateResult] = for { - job <- JobFactory.convertAllPdfs[F](collective, submitter, Priority.Low) + job <- JobFactory + .convertAllPdfs[F](ConvertAllPdfArgs(collective), submitter, Priority.Low) _ <- jobStore.insertIfNew(job.encode) } yield UpdateResult.success def generatePreview( args: MakePreviewArgs, - account: AccountId + submitter: UserTaskScope ): F[UpdateResult] = for { - job <- JobFactory.makePreview[F](args, account.some) + job <- JobFactory.makePreview[F](args, submitter) _ <- jobStore.insertIfNew(job.encode) } yield UpdateResult.success @@ -883,7 +903,8 @@ object OItem { storeMode: MakePreviewArgs.StoreMode ): F[UpdateResult] = for { - job <- JobFactory.allPreviews[F](AllPreviewsArgs(None, storeMode), None) + job <- JobFactory + 
.allPreviews[F](AllPreviewsArgs(None, storeMode), UserTaskScope.system) _ <- jobStore.insertIfNew(job.encode) } yield UpdateResult.success diff --git a/modules/backend/src/main/scala/docspell/backend/ops/OItemLink.scala b/modules/backend/src/main/scala/docspell/backend/ops/OItemLink.scala index 457c2b42..79e68c42 100644 --- a/modules/backend/src/main/scala/docspell/backend/ops/OItemLink.scala +++ b/modules/backend/src/main/scala/docspell/backend/ops/OItemLink.scala @@ -9,10 +9,9 @@ package docspell.backend.ops import cats.data.NonEmptyList import cats.effect._ import cats.implicits._ - import docspell.backend.ops.OItemLink.LinkResult import docspell.backend.ops.search.OSearch -import docspell.common.{AccountId, Ident} +import docspell.common._ import docspell.query.ItemQuery import docspell.query.ItemQueryDsl._ import docspell.store.qb.Batch @@ -22,12 +21,16 @@ import docspell.store.{AddResult, Store} trait OItemLink[F[_]] { - def addAll(cid: Ident, target: Ident, related: NonEmptyList[Ident]): F[LinkResult] + def addAll( + cid: CollectiveId, + target: Ident, + related: NonEmptyList[Ident] + ): F[LinkResult] - def removeAll(cid: Ident, target: Ident, related: NonEmptyList[Ident]): F[Unit] + def removeAll(cid: CollectiveId, target: Ident, related: NonEmptyList[Ident]): F[Unit] def getRelated( - account: AccountId, + account: AccountInfo, item: Ident, batch: Batch ): F[Vector[ListItemWithTags]] @@ -48,12 +51,12 @@ object OItemLink { def apply[F[_]: Sync](store: Store[F], search: OSearch[F]): OItemLink[F] = new OItemLink[F] { def getRelated( - accountId: AccountId, + accountId: AccountInfo, item: Ident, batch: Batch ): F[Vector[ListItemWithTags]] = store - .transact(RItemLink.findLinked(accountId.collective, item)) + .transact(RItemLink.findLinked(accountId.collectiveId, item)) .map(ids => NonEmptyList.fromList(ids.toList)) .flatMap { case Some(nel) => @@ -69,14 +72,22 @@ object OItemLink { Vector.empty[ListItemWithTags].pure[F] } - def addAll(cid: Ident, target: 
Ident, related: NonEmptyList[Ident]): F[LinkResult] = + def addAll( + cid: CollectiveId, + target: Ident, + related: NonEmptyList[Ident] + ): F[LinkResult] = if (related.contains_(target)) LinkResult.linkTargetItemError.pure[F] else related.traverse(addSingle(cid, target, _)).as(LinkResult.Success) - def removeAll(cid: Ident, target: Ident, related: NonEmptyList[Ident]): F[Unit] = + def removeAll( + cid: CollectiveId, + target: Ident, + related: NonEmptyList[Ident] + ): F[Unit] = store.transact(RItemLink.deleteAll(cid, target, related)).void - def addSingle(cid: Ident, target: Ident, related: Ident): F[Unit] = { + def addSingle(cid: CollectiveId, target: Ident, related: Ident): F[Unit] = { val exists = RItemLink.exists(cid, target, related) val insert = RItemLink.insertNew(cid, target, related) store.add(insert, exists).flatMap { diff --git a/modules/backend/src/main/scala/docspell/backend/ops/OItemSearch.scala b/modules/backend/src/main/scala/docspell/backend/ops/OItemSearch.scala index b56b850a..ee72d7b9 100644 --- a/modules/backend/src/main/scala/docspell/backend/ops/OItemSearch.scala +++ b/modules/backend/src/main/scala/docspell/backend/ops/OItemSearch.scala @@ -21,32 +21,39 @@ import docspell.store.records._ import doobie.implicits._ trait OItemSearch[F[_]] { - def findItem(id: Ident, collective: Ident): F[Option[ItemData]] + def findItem(id: Ident, collective: CollectiveId): F[Option[ItemData]] - def findDeleted(collective: Ident, maxUpdate: Timestamp, limit: Int): F[Vector[RItem]] + def findDeleted( + collective: CollectiveId, + maxUpdate: Timestamp, + limit: Int + ): F[Vector[RItem]] - def findAttachment(id: Ident, collective: Ident): F[Option[AttachmentData[F]]] + def findAttachment(id: Ident, collective: CollectiveId): F[Option[AttachmentData[F]]] def findAttachmentSource( id: Ident, - collective: Ident + collective: CollectiveId ): F[Option[AttachmentSourceData[F]]] def findAttachmentArchive( id: Ident, - collective: Ident + collective: CollectiveId ): 
F[Option[AttachmentArchiveData[F]]] def findAttachmentPreview( id: Ident, - collective: Ident + collective: CollectiveId ): F[Option[AttachmentPreviewData[F]]] - def findItemPreview(item: Ident, collective: Ident): F[Option[AttachmentPreviewData[F]]] + def findItemPreview( + item: Ident, + collective: CollectiveId + ): F[Option[AttachmentPreviewData[F]]] - def findAttachmentMeta(id: Ident, collective: Ident): F[Option[RAttachmentMeta]] + def findAttachmentMeta(id: Ident, collective: CollectiveId): F[Option[RAttachmentMeta]] - def findByFileCollective(checksum: String, collective: Ident): F[Vector[RItem]] + def findByFileCollective(checksum: String, collective: CollectiveId): F[Vector[RItem]] def findByFileSource(checksum: String, sourceId: Ident): F[Option[Vector[RItem]]] @@ -114,12 +121,12 @@ object OItemSearch { def apply[F[_]: Async](store: Store[F]): Resource[F, OItemSearch[F]] = Resource.pure[F, OItemSearch[F]](new OItemSearch[F] { - def findItem(id: Ident, collective: Ident): F[Option[ItemData]] = + def findItem(id: Ident, collective: CollectiveId): F[Option[ItemData]] = store .transact(QItem.findItem(id, collective)) def findDeleted( - collective: Ident, + collective: CollectiveId, maxUpdate: Timestamp, limit: Int ): F[Vector[RItem]] = @@ -129,7 +136,10 @@ object OItemSearch { .compile .toVector - def findAttachment(id: Ident, collective: Ident): F[Option[AttachmentData[F]]] = + def findAttachment( + id: Ident, + collective: CollectiveId + ): F[Option[AttachmentData[F]]] = store .transact(RAttachment.findByIdAndCollective(id, collective)) .flatMap { @@ -148,7 +158,7 @@ object OItemSearch { def findAttachmentSource( id: Ident, - collective: Ident + collective: CollectiveId ): F[Option[AttachmentSourceData[F]]] = store .transact(RAttachmentSource.findByIdAndCollective(id, collective)) @@ -168,7 +178,7 @@ object OItemSearch { def findAttachmentPreview( id: Ident, - collective: Ident + collective: CollectiveId ): F[Option[AttachmentPreviewData[F]]] = store 
.transact(RAttachmentPreview.findByIdAndCollective(id, collective)) @@ -188,7 +198,7 @@ object OItemSearch { def findItemPreview( item: Ident, - collective: Ident + collective: CollectiveId ): F[Option[AttachmentPreviewData[F]]] = store .transact(RAttachmentPreview.findByItemAndCollective(item, collective)) @@ -208,7 +218,7 @@ object OItemSearch { def findAttachmentArchive( id: Ident, - collective: Ident + collective: CollectiveId ): F[Option[AttachmentArchiveData[F]]] = store .transact(RAttachmentArchive.findByIdAndCollective(id, collective)) @@ -234,15 +244,21 @@ object OItemSearch { .map(fm => f(fm)) .value - def findAttachmentMeta(id: Ident, collective: Ident): F[Option[RAttachmentMeta]] = + def findAttachmentMeta( + id: Ident, + collective: CollectiveId + ): F[Option[RAttachmentMeta]] = store.transact(QAttachment.getAttachmentMeta(id, collective)) - def findByFileCollective(checksum: String, collective: Ident): F[Vector[RItem]] = + def findByFileCollective( + checksum: String, + collective: CollectiveId + ): F[Vector[RItem]] = store.transact(QItem.findByChecksum(checksum, collective, Set.empty)) def findByFileSource(checksum: String, sourceId: Ident): F[Option[Vector[RItem]]] = store.transact((for { - coll <- OptionT(RSource.findCollective(sourceId)) + coll <- OptionT(RSource.findCollectiveId(sourceId)) items <- OptionT.liftF(QItem.findByChecksum(checksum, coll, Set.empty)) } yield items).value) }) diff --git a/modules/backend/src/main/scala/docspell/backend/ops/OMail.scala b/modules/backend/src/main/scala/docspell/backend/ops/OMail.scala index ca3e0fc6..d8d8f129 100644 --- a/modules/backend/src/main/scala/docspell/backend/ops/OMail.scala +++ b/modules/backend/src/main/scala/docspell/backend/ops/OMail.scala @@ -21,33 +21,38 @@ import emil._ trait OMail[F[_]] { - def getSmtpSettings(accId: AccountId, nameQ: Option[String]): F[Vector[RUserEmail]] + def getSmtpSettings(userId: Ident, nameQ: Option[String]): F[Vector[RUserEmail]] - def findSmtpSettings(accId: 
AccountId, name: Ident): OptionT[F, RUserEmail] + def findSmtpSettings(userId: Ident, name: Ident): OptionT[F, RUserEmail] - def createSmtpSettings(accId: AccountId, data: SmtpSettings): F[AddResult] + def createSmtpSettings(userId: Ident, data: SmtpSettings): F[AddResult] - def updateSmtpSettings(accId: AccountId, name: Ident, data: OMail.SmtpSettings): F[Int] + def updateSmtpSettings(userId: Ident, name: Ident, data: OMail.SmtpSettings): F[Int] - def deleteSmtpSettings(accId: AccountId, name: Ident): F[Int] + def deleteSmtpSettings(userId: Ident, name: Ident): F[Int] - def getImapSettings(accId: AccountId, nameQ: Option[String]): F[Vector[RUserImap]] + def getImapSettings(userId: Ident, nameQ: Option[String]): F[Vector[RUserImap]] - def findImapSettings(accId: AccountId, name: Ident): OptionT[F, RUserImap] + def findImapSettings(userId: Ident, name: Ident): OptionT[F, RUserImap] - def createImapSettings(accId: AccountId, data: ImapSettings): F[AddResult] + def createImapSettings(userId: Ident, data: ImapSettings): F[AddResult] - def updateImapSettings(accId: AccountId, name: Ident, data: OMail.ImapSettings): F[Int] + def updateImapSettings(userId: Ident, name: Ident, data: OMail.ImapSettings): F[Int] - def deleteImapSettings(accId: AccountId, name: Ident): F[Int] + def deleteImapSettings(userId: Ident, name: Ident): F[Int] - def sendMail(accId: AccountId, name: Ident, m: ItemMail): F[SendResult] + def sendMail( + userId: Ident, + collectiveId: CollectiveId, + name: Ident, + m: ItemMail + ): F[SendResult] - def getSentMailsForItem(accId: AccountId, itemId: Ident): F[Vector[Sent]] + def getSentMailsForItem(collectiveId: CollectiveId, itemId: Ident): F[Vector[Sent]] - def getSentMail(accId: AccountId, mailId: Ident): OptionT[F, Sent] + def getSentMail(collectiveId: CollectiveId, mailId: Ident): OptionT[F, Sent] - def deleteSentMail(accId: AccountId, mailId: Ident): F[Int] + def deleteSentMail(collectiveId: CollectiveId, mailId: Ident): F[Int] } object OMail { @@ 
-124,9 +129,9 @@ object OMail { mailReplyTo: Option[MailAddress] ) { - def toRecord(accId: AccountId) = - RUserEmail.fromAccount( - accId, + def toRecord(userId: Ident) = + RUserEmail.fromUser( + userId, name, smtpHost, smtpPort, @@ -150,9 +155,9 @@ object OMail { imapOAuth2: Boolean ) { - def toRecord(accId: AccountId) = - RUserImap.fromAccount( - accId, + def toRecord(userId: Ident) = + RUserImap.fromUser( + userId, name, imapHost, imapPort, @@ -167,74 +172,79 @@ object OMail { def apply[F[_]: Async](store: Store[F], emil: Emil[F]): Resource[F, OMail[F]] = Resource.pure[F, OMail[F]](new OMail[F] { def getSmtpSettings( - accId: AccountId, + userId: Ident, nameQ: Option[String] ): F[Vector[RUserEmail]] = - store.transact(RUserEmail.findByAccount(accId, nameQ)) + store.transact(RUserEmail.findByAccount(userId, nameQ)) - def findSmtpSettings(accId: AccountId, name: Ident): OptionT[F, RUserEmail] = - OptionT(store.transact(RUserEmail.getByName(accId, name))) + def findSmtpSettings(userId: Ident, name: Ident): OptionT[F, RUserEmail] = + OptionT(store.transact(RUserEmail.getByName(userId, name))) - def createSmtpSettings(accId: AccountId, s: SmtpSettings): F[AddResult] = + def createSmtpSettings(userId: Ident, s: SmtpSettings): F[AddResult] = (for { - ru <- OptionT(store.transact(s.toRecord(accId).value)) + ru <- OptionT(store.transact(s.toRecord(userId).value)) ins = RUserEmail.insert(ru) exists = RUserEmail.exists(ru.uid, ru.name) res <- OptionT.liftF(store.add(ins, exists)) } yield res).getOrElse(AddResult.Failure(new Exception("User not found"))) def updateSmtpSettings( - accId: AccountId, + userId: Ident, name: Ident, data: SmtpSettings ): F[Int] = { val op = for { - um <- OptionT(RUserEmail.getByName(accId, name)) - ru <- data.toRecord(accId) + um <- OptionT(RUserEmail.getByName(userId, name)) + ru <- data.toRecord(userId) n <- OptionT.liftF(RUserEmail.update(um.id, ru)) } yield n store.transact(op.value).map(_.getOrElse(0)) } - def deleteSmtpSettings(accId: 
AccountId, name: Ident): F[Int] = - store.transact(RUserEmail.delete(accId, name)) + def deleteSmtpSettings(userId: Ident, name: Ident): F[Int] = + store.transact(RUserEmail.delete(userId, name)) - def getImapSettings(accId: AccountId, nameQ: Option[String]): F[Vector[RUserImap]] = - store.transact(RUserImap.findByAccount(accId, nameQ)) + def getImapSettings(userId: Ident, nameQ: Option[String]): F[Vector[RUserImap]] = + store.transact(RUserImap.findByAccount(userId, nameQ)) - def findImapSettings(accId: AccountId, name: Ident): OptionT[F, RUserImap] = - OptionT(store.transact(RUserImap.getByName(accId, name))) + def findImapSettings(userId: Ident, name: Ident): OptionT[F, RUserImap] = + OptionT(store.transact(RUserImap.getByName(userId, name))) - def createImapSettings(accId: AccountId, data: ImapSettings): F[AddResult] = + def createImapSettings(userId: Ident, data: ImapSettings): F[AddResult] = (for { - ru <- OptionT(store.transact(data.toRecord(accId).value)) + ru <- OptionT(store.transact(data.toRecord(userId).value)) ins = RUserImap.insert(ru) exists = RUserImap.exists(ru.uid, ru.name) res <- OptionT.liftF(store.add(ins, exists)) } yield res).getOrElse(AddResult.Failure(new Exception("User not found"))) def updateImapSettings( - accId: AccountId, + userId: Ident, name: Ident, data: OMail.ImapSettings ): F[Int] = { val op = for { - um <- OptionT(RUserImap.getByName(accId, name)) - ru <- data.toRecord(accId) + um <- OptionT(RUserImap.getByName(userId, name)) + ru <- data.toRecord(userId) n <- OptionT.liftF(RUserImap.update(um.id, ru)) } yield n store.transact(op.value).map(_.getOrElse(0)) } - def deleteImapSettings(accId: AccountId, name: Ident): F[Int] = - store.transact(RUserImap.delete(accId, name)) + def deleteImapSettings(userId: Ident, name: Ident): F[Int] = + store.transact(RUserImap.delete(userId, name)) - def sendMail(accId: AccountId, name: Ident, m: ItemMail): F[SendResult] = { + def sendMail( + userId: Ident, + collectiveId: CollectiveId, + name: 
Ident, + m: ItemMail + ): F[SendResult] = { val getSmtpSettings: OptionT[F, RUserEmail] = - OptionT(store.transact(RUserEmail.getByName(accId, name))) + OptionT(store.transact(RUserEmail.getByName(userId, name))) def createMail(sett: RUserEmail): OptionT[F, Mail[F]] = { import _root_.emil.builder._ @@ -243,7 +253,7 @@ object OMail { _ <- OptionT.liftF(store.transact(RItem.existsById(m.item))).filter(identity) ras <- OptionT.liftF( store.transact( - RAttachment.findByItemAndCollectiveWithMeta(m.item, accId.collective) + RAttachment.findByItemAndCollectiveWithMeta(m.item, collectiveId) ) ) } yield { @@ -275,7 +285,7 @@ object OMail { val save = for { data <- RSentMail.forItem( m.item, - accId, + userId, msgId, cfg.mailFrom, name, @@ -307,17 +317,20 @@ object OMail { } yield conv).getOrElse(SendResult.NotFound) } - def getSentMailsForItem(accId: AccountId, itemId: Ident): F[Vector[Sent]] = + def getSentMailsForItem( + collectiveId: CollectiveId, + itemId: Ident + ): F[Vector[Sent]] = store - .transact(QMails.findMails(accId.collective, itemId)) + .transact(QMails.findMails(collectiveId, itemId)) .map(_.map(t => Sent.create(t._1, t._2))) - def getSentMail(accId: AccountId, mailId: Ident): OptionT[F, Sent] = - OptionT(store.transact(QMails.findMail(accId.collective, mailId))).map(t => + def getSentMail(collectiveId: CollectiveId, mailId: Ident): OptionT[F, Sent] = + OptionT(store.transact(QMails.findMail(collectiveId, mailId))).map(t => Sent.create(t._1, t._2) ) - def deleteSentMail(accId: AccountId, mailId: Ident): F[Int] = - store.transact(QMails.delete(accId.collective, mailId)) + def deleteSentMail(collectiveId: CollectiveId, mailId: Ident): F[Int] = + store.transact(QMails.delete(collectiveId, mailId)) }) } diff --git a/modules/backend/src/main/scala/docspell/backend/ops/ONotification.scala b/modules/backend/src/main/scala/docspell/backend/ops/ONotification.scala index 05b58275..445efab6 100644 --- 
a/modules/backend/src/main/scala/docspell/backend/ops/ONotification.scala +++ b/modules/backend/src/main/scala/docspell/backend/ops/ONotification.scala @@ -39,35 +39,35 @@ trait ONotification[F[_]] { def findNotificationChannel( ref: ChannelRef, - account: AccountId + userId: Ident ): F[Vector[NotificationChannel]] - def listChannels(account: AccountId): F[Vector[Channel]] + def listChannels(userId: Ident): F[Vector[Channel]] - def deleteChannel(id: Ident, account: AccountId): F[UpdateResult] + def deleteChannel(id: Ident, userId: Ident): F[UpdateResult] - def createChannel(channel: Channel, account: AccountId): F[AddResult] + def createChannel(channel: Channel, userId: Ident): F[AddResult] - def updateChannel(channel: Channel, account: AccountId): F[UpdateResult] + def updateChannel(channel: Channel, userId: Ident): F[UpdateResult] - def listHooks(account: AccountId): F[Vector[Hook]] + def listHooks(userId: Ident): F[Vector[Hook]] - def deleteHook(id: Ident, account: AccountId): F[UpdateResult] + def deleteHook(id: Ident, userId: Ident): F[UpdateResult] - def createHook(hook: Hook, account: AccountId): F[AddResult] + def createHook(hook: Hook, userId: Ident): F[AddResult] - def updateHook(hook: Hook, account: AccountId): F[UpdateResult] + def updateHook(hook: Hook, userId: Ident): F[UpdateResult] def sampleEvent( evt: EventType, - account: AccountId, + account: AccountInfo, baseUrl: Option[LenientUri] ): F[EventContext] def sendSampleEvent( evt: EventType, channel: Nel[ChannelRef], - account: AccountId, + account: AccountInfo, baseUrl: Option[LenientUri] ): F[ONotification.SendTestResult] } @@ -81,13 +81,6 @@ object ONotification { Resource.pure[F, ONotification[F]](new ONotification[F] { val log = docspell.logging.getLogger[F] - def withUserId[A]( - account: AccountId - )(f: Ident => F[UpdateResult]): F[UpdateResult] = - OptionT(store.transact(RUser.findIdByAccount(account))) - .semiflatMap(f) - .getOrElse(UpdateResult.notFound) - def offerEvents(ev: 
Iterable[Event]): F[Unit] = ev.toList.traverse(notMod.offer).as(()) @@ -100,7 +93,7 @@ object ONotification { def sampleEvent( evt: EventType, - account: AccountId, + account: AccountInfo, baseUrl: Option[LenientUri] ): F[EventContext] = Event @@ -110,14 +103,14 @@ object ONotification { def sendSampleEvent( evt: EventType, channels: Nel[ChannelRef], - account: AccountId, + account: AccountInfo, baseUrl: Option[LenientUri] ): F[SendTestResult] = (for { ev <- sampleEvent(evt, account, baseUrl) logbuf <- Logger.buffer() ch <- channels.toList.toVector.flatTraverse( - findNotificationChannel(_, account) + findNotificationChannel(_, account.userId) ) _ <- notMod.send(logbuf._2.andThen(log), ev, ch) logs <- logbuf._1.get @@ -131,54 +124,51 @@ object ONotification { SendTestResult(false, Vector(ev)) } - def listChannels(account: AccountId): F[Vector[Channel]] = + def listChannels(userId: Ident): F[Vector[Channel]] = store - .transact(RNotificationChannel.getByAccount(account)) + .transact(RNotificationChannel.getByAccount(userId)) .map(_.map(ChannelConv.makeChannel)) - def deleteChannel(id: Ident, account: AccountId): F[UpdateResult] = + def deleteChannel(id: Ident, userId: Ident): F[UpdateResult] = UpdateResult .fromUpdate( - store.transact(RNotificationChannel.deleteByAccount(id, account)) + store.transact(RNotificationChannel.deleteByAccount(id, userId)) ) - .flatTap(_ => log.info(s"Deleted channel ${id.id} for ${account.asString}")) + .flatTap(_ => log.info(s"Deleted channel ${id.id} for ${userId.id}")) - def createChannel(channel: Channel, account: AccountId): F[AddResult] = + def createChannel(channel: Channel, userId: Ident): F[AddResult] = (for { newId <- OptionT.liftF(Ident.randomId[F]) - userId <- OptionT(store.transact(RUser.findIdByAccount(account))) r <- ChannelConv.makeRecord[F](store, channel, newId, userId) _ <- OptionT.liftF(store.transact(RNotificationChannel.insert(r))) - _ <- OptionT.liftF(log.debug(s"Created channel $r for $account")) + _ <- 
OptionT.liftF(log.debug(s"Created channel $r for ${userId.id}")) } yield AddResult.Success) .getOrElse(AddResult.failure(new Exception("User not found!"))) - def updateChannel(channel: Channel, account: AccountId): F[UpdateResult] = + def updateChannel(channel: Channel, userId: Ident): F[UpdateResult] = (for { - userId <- OptionT(store.transact(RUser.findIdByAccount(account))) r <- ChannelConv.makeRecord[F](store, channel, channel.id, userId) n <- OptionT.liftF(store.transact(RNotificationChannel.update(r))) } yield UpdateResult.fromUpdateRows(n)).getOrElse(UpdateResult.notFound) - def listHooks(account: AccountId): F[Vector[Hook]] = + def listHooks(userId: Ident): F[Vector[Hook]] = store.transact(for { - list <- RNotificationHook.findAllByAccount(account) + list <- RNotificationHook.findAllByAccount(userId) res <- list.traverse((Hook.fromRecord _).tupled) } yield res) - def deleteHook(id: Ident, account: AccountId): F[UpdateResult] = + def deleteHook(id: Ident, userId: Ident): F[UpdateResult] = UpdateResult - .fromUpdate(store.transact(RNotificationHook.deleteByAccount(id, account))) + .fromUpdate(store.transact(RNotificationHook.deleteByAccount(id, userId))) - def createHook(hook: Hook, account: AccountId): F[AddResult] = + def createHook(hook: Hook, userId: Ident): F[AddResult] = (for { _ <- OptionT.liftF(log.debug(s"Creating new notification hook: $hook")) - userId <- OptionT(store.transact(RUser.findIdByAccount(account))) hr <- OptionT.liftF(Hook.makeRecord(userId, hook)) _ <- OptionT.liftF( store.transact( RNotificationHook.insert(hr) *> RNotificationHookChannel - .updateAll(hr.id, hook.channels.toList) + .updateAll(hr.id, hook.channels) ) ) _ <- OptionT.liftF( @@ -187,13 +177,11 @@ object ONotification { } yield AddResult.Success) .getOrElse(AddResult.failure(new Exception("User or channel not found!"))) - def updateHook(hook: Hook, account: AccountId): F[UpdateResult] = { + def updateHook(hook: Hook, userId: Ident): F[UpdateResult] = { def withHook(f: 
RNotificationHook => F[UpdateResult]): F[UpdateResult] = - withUserId(account)(userId => - OptionT(store.transact(RNotificationHook.getById(hook.id, userId))) - .semiflatMap(f) - .getOrElse(UpdateResult.notFound) - ) + OptionT(store.transact(RNotificationHook.getById(hook.id, userId))) + .semiflatMap(f) + .getOrElse(UpdateResult.notFound) def doUpdate(r: RNotificationHook): F[UpdateResult] = UpdateResult.fromUpdate(store.transact(for { @@ -201,10 +189,7 @@ object ONotification { r.id, if (hook.allEvents) Nil else hook.events ) - nc <- RNotificationHookChannel.updateAll( - r.id, - hook.channels.toList - ) + nc <- RNotificationHookChannel.updateAll(r.id, hook.channels) nr <- RNotificationHook.update( r.copy( enabled = hook.enabled, @@ -230,10 +215,9 @@ object ONotification { def findNotificationChannel( ref: ChannelRef, - accountId: AccountId + userId: Ident ): F[Vector[NotificationChannel]] = (for { - userId <- OptionT(store.transact(RUser.findIdByAccount(accountId))) rec <- OptionT(store.transact(RNotificationChannel.getByRef(ref, userId))) ch <- OptionT.liftF(store.transact(QNotification.readChannel(rec))) } yield ch).getOrElse(Vector.empty) diff --git a/modules/backend/src/main/scala/docspell/backend/ops/OOrganization.scala b/modules/backend/src/main/scala/docspell/backend/ops/OOrganization.scala index e2262161..c8a6134e 100644 --- a/modules/backend/src/main/scala/docspell/backend/ops/OOrganization.scala +++ b/modules/backend/src/main/scala/docspell/backend/ops/OOrganization.scala @@ -18,14 +18,15 @@ import docspell.store.records._ trait OOrganization[F[_]] { def findAllOrg( - account: AccountId, + collectiveId: CollectiveId, query: Option[String], order: OrganizationOrder ): F[Vector[OrgAndContacts]] - def findOrg(account: AccountId, orgId: Ident): F[Option[OrgAndContacts]] + + def findOrg(collectiveId: CollectiveId, orgId: Ident): F[Option[OrgAndContacts]] def findAllOrgRefs( - account: AccountId, + collectiveId: CollectiveId, nameQuery: Option[String], order: 
OrganizationOrder ): F[Vector[IdRef]] @@ -35,15 +36,15 @@ trait OOrganization[F[_]] { def updateOrg(s: OrgAndContacts): F[AddResult] def findAllPerson( - account: AccountId, + collectiveId: CollectiveId, query: Option[String], order: PersonOrder ): F[Vector[PersonAndContacts]] - def findPerson(account: AccountId, persId: Ident): F[Option[PersonAndContacts]] + def findPerson(collectiveId: CollectiveId, persId: Ident): F[Option[PersonAndContacts]] def findAllPersonRefs( - account: AccountId, + collectiveId: CollectiveId, nameQuery: Option[String], order: PersonOrder ): F[Vector[IdRef]] @@ -54,9 +55,9 @@ trait OOrganization[F[_]] { /** Update a person with their contacts. The additional organization is ignored. */ def updatePerson(s: PersonAndContacts): F[AddResult] - def deleteOrg(orgId: Ident, collective: Ident): F[AddResult] + def deleteOrg(orgId: Ident, collective: CollectiveId): F[AddResult] - def deletePerson(personId: Ident, collective: Ident): F[AddResult] + def deletePerson(personId: Ident, collective: CollectiveId): F[AddResult] } object OOrganization { @@ -134,32 +135,32 @@ object OOrganization { Resource.pure[F, OOrganization[F]](new OOrganization[F] { def findAllOrg( - account: AccountId, + collectiveId: CollectiveId, query: Option[String], order: OrganizationOrder ): F[Vector[OrgAndContacts]] = store .transact( QOrganization - .findOrgAndContact(account.collective, query, OrganizationOrder(order)) + .findOrgAndContact(collectiveId, query, OrganizationOrder(order)) ) .map { case (org, cont) => OrgAndContacts(org, cont) } .compile .toVector - def findOrg(account: AccountId, orgId: Ident): F[Option[OrgAndContacts]] = + def findOrg(collectiveId: CollectiveId, orgId: Ident): F[Option[OrgAndContacts]] = store - .transact(QOrganization.getOrgAndContact(account.collective, orgId)) + .transact(QOrganization.getOrgAndContact(collectiveId, orgId)) .map(_.map { case (org, cont) => OrgAndContacts(org, cont) }) def findAllOrgRefs( - account: AccountId, + collectiveId: 
CollectiveId, nameQuery: Option[String], order: OrganizationOrder ): F[Vector[IdRef]] = store.transact( ROrganization.findAllRef( - account.collective, + collectiveId, nameQuery, OrganizationOrder(order) ) @@ -172,31 +173,34 @@ object OOrganization { QOrganization.updateOrg(s.org, s.contacts, s.org.cid)(store) def findAllPerson( - account: AccountId, + collectiveId: CollectiveId, query: Option[String], order: PersonOrder ): F[Vector[PersonAndContacts]] = store .transact( QOrganization - .findPersonAndContact(account.collective, query, PersonOrder(order)) + .findPersonAndContact(collectiveId, query, PersonOrder(order)) ) .map { case (person, org, cont) => PersonAndContacts(person, org, cont) } .compile .toVector - def findPerson(account: AccountId, persId: Ident): F[Option[PersonAndContacts]] = + def findPerson( + collectiveId: CollectiveId, + persId: Ident + ): F[Option[PersonAndContacts]] = store - .transact(QOrganization.getPersonAndContact(account.collective, persId)) + .transact(QOrganization.getPersonAndContact(collectiveId, persId)) .map(_.map { case (pers, org, cont) => PersonAndContacts(pers, org, cont) }) def findAllPersonRefs( - account: AccountId, + collectiveId: CollectiveId, nameQuery: Option[String], order: PersonOrder ): F[Vector[IdRef]] = store.transact( - RPerson.findAllRef(account.collective, nameQuery, PersonOrder.nameOnly(order)) + RPerson.findAllRef(collectiveId, nameQuery, PersonOrder.nameOnly(order)) ) def addPerson(s: PersonAndContacts): F[AddResult] = @@ -205,13 +209,13 @@ object OOrganization { def updatePerson(s: PersonAndContacts): F[AddResult] = QOrganization.updatePerson(s.person, s.contacts, s.person.cid)(store) - def deleteOrg(orgId: Ident, collective: Ident): F[AddResult] = + def deleteOrg(orgId: Ident, collective: CollectiveId): F[AddResult] = store .transact(QOrganization.deleteOrg(orgId, collective)) .attempt .map(AddResult.fromUpdate) - def deletePerson(personId: Ident, collective: Ident): F[AddResult] = + def 
deletePerson(personId: Ident, collective: CollectiveId): F[AddResult] = store .transact(QOrganization.deletePerson(personId, collective)) .attempt diff --git a/modules/backend/src/main/scala/docspell/backend/ops/OShare.scala b/modules/backend/src/main/scala/docspell/backend/ops/OShare.scala index 630fa57a..250b0959 100644 --- a/modules/backend/src/main/scala/docspell/backend/ops/OShare.scala +++ b/modules/backend/src/main/scala/docspell/backend/ops/OShare.scala @@ -28,16 +28,16 @@ import scodec.bits.ByteVector trait OShare[F[_]] { def findAll( - collective: Ident, + collective: CollectiveId, ownerLogin: Option[Ident], query: Option[String] ): F[List[ShareData]] - def delete(id: Ident, collective: Ident): F[Boolean] + def delete(id: Ident, collective: CollectiveId): F[Boolean] def addNew(share: OShare.NewShare): F[OShare.ChangeResult] - def findOne(id: Ident, collective: Ident): OptionT[F, ShareData] + def findOne(id: Ident, collective: CollectiveId): OptionT[F, ShareData] def update( id: Ident, @@ -71,7 +71,12 @@ trait OShare[F[_]] { */ def parseQuery(share: ShareQuery, qs: String): QueryParseResult - def sendMail(account: AccountId, connection: Ident, mail: ShareMail): F[SendResult] + def sendMail( + collectiveId: CollectiveId, + userId: Ident, + connection: Ident, + mail: ShareMail + ): F[SendResult] } object OShare { @@ -97,7 +102,7 @@ object OShare { case object NotFound extends SendResult } - final case class ShareQuery(id: Ident, account: AccountId, query: ItemQuery) + final case class ShareQuery(id: Ident, account: AccountInfo, query: ItemQuery) sealed trait VerifyResult { def toEither: Either[String, ShareToken] = @@ -143,7 +148,7 @@ object OShare { def queryWithFulltext: ChangeResult = QueryWithFulltext } - final case class ShareData(share: RShare, user: RUser) + final case class ShareData(share: RShare, account: AccountInfo) def apply[F[_]: Async]( store: Store[F], @@ -155,7 +160,7 @@ object OShare { private[this] val logger = 
docspell.logging.getLogger[F] def findAll( - collective: Ident, + collective: CollectiveId, ownerLogin: Option[Ident], query: Option[String] ): F[List[ShareData]] = @@ -163,7 +168,7 @@ object OShare { .transact(RShare.findAllByCollective(collective, ownerLogin, query)) .map(_.map(ShareData.tupled)) - def delete(id: Ident, collective: Ident): F[Boolean] = + def delete(id: Ident, collective: CollectiveId): F[Boolean] = store.transact(RShare.deleteByIdAndCid(id, collective)).map(_ > 0) def addNew(share: NewShare): F[ChangeResult] = @@ -225,7 +230,7 @@ object OShare { case _ => true } - def findOne(id: Ident, collective: Ident): OptionT[F, ShareData] = + def findOne(id: Ident, collective: CollectiveId): OptionT[F, ShareData] = RShare .findOne(id, collective) .mapK(store.transform) @@ -286,8 +291,8 @@ object OShare { RShare .findCurrentActive(id) .mapK(store.transform) - .map { case (share, user) => - ShareQuery(share.id, user.accountId, share.query) + .map { case (share, accInfo) => + ShareQuery(share.id, accInfo, share.query) } def findAttachmentPreview( @@ -298,7 +303,7 @@ object OShare { sq <- findShareQuery(shareId) _ <- checkAttachment(sq, AttachId(attachId.id)) res <- OptionT( - itemSearch.findAttachmentPreview(attachId, sq.account.collective) + itemSearch.findAttachmentPreview(attachId, sq.account.collectiveId) ) } yield res @@ -306,14 +311,14 @@ object OShare { for { sq <- findShareQuery(shareId) _ <- checkAttachment(sq, AttachId(attachId.id)) - res <- OptionT(itemSearch.findAttachment(attachId, sq.account.collective)) + res <- OptionT(itemSearch.findAttachment(attachId, sq.account.collectiveId)) } yield res def findItem(itemId: Ident, shareId: Ident): OptionT[F, ItemData] = for { sq <- findShareQuery(shareId) _ <- checkAttachment(sq, Expr.itemIdEq(itemId.id)) - res <- OptionT(itemSearch.findItem(itemId, sq.account.collective)) + res <- OptionT(itemSearch.findItem(itemId, sq.account.collectiveId)) } yield res /** Check whether the attachment with the given id 
is in the results of the given @@ -343,12 +348,13 @@ object OShare { } def sendMail( - account: AccountId, + collectiveId: CollectiveId, + userId: Ident, connection: Ident, mail: ShareMail ): F[SendResult] = { val getSmtpSettings: OptionT[F, RUserEmail] = - OptionT(store.transact(RUserEmail.getByName(account, connection))) + OptionT(store.transact(RUserEmail.getByName(userId, connection))) def createMail(sett: RUserEmail): OptionT[F, Mail[F]] = { import _root_.emil.builder._ @@ -366,20 +372,19 @@ object OShare { ) } - def sendMail(cfg: MailConfig, mail: Mail[F]): F[Either[SendResult, String]] = + def doSendMail(cfg: MailConfig, mail: Mail[F]): F[Either[SendResult, String]] = emil(cfg).send(mail).map(_.head).attempt.map(_.left.map(SendResult.SendFailure)) (for { _ <- RShare .findCurrentActive(mail.shareId) - .filter(_._2.cid == account.collective) + .filter(_._2.collectiveId == collectiveId) .mapK(store.transform) mailCfg <- getSmtpSettings mail <- createMail(mailCfg) - mid <- OptionT.liftF(sendMail(mailCfg.toMailConfig, mail)) + mid <- OptionT.liftF(doSendMail(mailCfg.toMailConfig, mail)) conv = mid.fold(identity, id => SendResult.Success(id)) } yield conv).getOrElse(SendResult.NotFound) } - } } diff --git a/modules/backend/src/main/scala/docspell/backend/ops/OSource.scala b/modules/backend/src/main/scala/docspell/backend/ops/OSource.scala index b87bb231..0267b10e 100644 --- a/modules/backend/src/main/scala/docspell/backend/ops/OSource.scala +++ b/modules/backend/src/main/scala/docspell/backend/ops/OSource.scala @@ -9,7 +9,7 @@ package docspell.backend.ops import cats.effect.{Async, Resource} import cats.implicits._ -import docspell.common.{AccountId, Ident} +import docspell.common._ import docspell.store.UpdateResult import docspell.store.records.RSource import docspell.store.records.SourceData @@ -17,22 +17,22 @@ import docspell.store.{AddResult, Store} trait OSource[F[_]] { - def findAll(account: AccountId): F[Vector[SourceData]] + def findAll(collectiveId: 
CollectiveId): F[Vector[SourceData]] def add(s: RSource, tags: List[String]): F[AddResult] def update(s: RSource, tags: List[String]): F[AddResult] - def delete(id: Ident, collective: Ident): F[UpdateResult] + def delete(id: Ident, collective: CollectiveId): F[UpdateResult] } object OSource { def apply[F[_]: Async](store: Store[F]): Resource[F, OSource[F]] = Resource.pure[F, OSource[F]](new OSource[F] { - def findAll(account: AccountId): F[Vector[SourceData]] = + def findAll(collectiveId: CollectiveId): F[Vector[SourceData]] = store - .transact(SourceData.findAll(account.collective, _.abbrev)) + .transact(SourceData.findAll(collectiveId, _.abbrev)) .compile .to(Vector) @@ -52,7 +52,7 @@ object OSource { store.add(insert, exists).map(_.fold(identity, _.withMsg(msg), identity)) } - def delete(id: Ident, collective: Ident): F[UpdateResult] = + def delete(id: Ident, collective: CollectiveId): F[UpdateResult] = UpdateResult.fromUpdate(store.transact(SourceData.delete(id, collective))) }) diff --git a/modules/backend/src/main/scala/docspell/backend/ops/OTag.scala b/modules/backend/src/main/scala/docspell/backend/ops/OTag.scala index 07632c9b..179488dd 100644 --- a/modules/backend/src/main/scala/docspell/backend/ops/OTag.scala +++ b/modules/backend/src/main/scala/docspell/backend/ops/OTag.scala @@ -10,7 +10,7 @@ import cats.data.NonEmptyList import cats.effect.{Async, Resource} import cats.implicits._ -import docspell.common.{AccountId, Ident} +import docspell.common._ import docspell.store.records.RTagSource import docspell.store.records.{RTag, RTagItem} import docspell.store.{AddResult, Store} @@ -18,7 +18,7 @@ import docspell.store.{AddResult, Store} trait OTag[F[_]] { def findAll( - account: AccountId, + collectiveId: CollectiveId, query: Option[String], order: OTag.TagOrder ): F[Vector[RTag]] @@ -27,7 +27,7 @@ trait OTag[F[_]] { def update(s: RTag): F[AddResult] - def delete(id: Ident, collective: Ident): F[AddResult] + def delete(id: Ident, collective: 
CollectiveId): F[AddResult] /** Load all tags given their ids. Ids that are not available are ignored. */ def loadAll(ids: List[Ident]): F[Vector[RTag]] @@ -66,11 +66,11 @@ object OTag { def apply[F[_]: Async](store: Store[F]): Resource[F, OTag[F]] = Resource.pure[F, OTag[F]](new OTag[F] { def findAll( - account: AccountId, + collectiveId: CollectiveId, query: Option[String], order: TagOrder ): F[Vector[RTag]] = - store.transact(RTag.findAll(account.collective, query, TagOrder(order))) + store.transact(RTag.findAll(collectiveId, query, TagOrder(order))) def add(t: RTag): F[AddResult] = { def insert = RTag.insert(t) @@ -88,7 +88,7 @@ object OTag { store.add(insert, exists).map(_.fold(identity, _.withMsg(msg), identity)) } - def delete(id: Ident, collective: Ident): F[AddResult] = { + def delete(id: Ident, collective: CollectiveId): F[AddResult] = { val io = for { optTag <- RTag.findByIdAndCollective(id, collective) n0 <- optTag.traverse(t => RTagItem.deleteTag(t.tagId)) @@ -99,7 +99,7 @@ object OTag { } def loadAll(ids: List[Ident]): F[Vector[RTag]] = - if (ids.isEmpty) Vector.empty.pure[F] + if (ids.isEmpty) Vector.empty[RTag].pure[F] else store.transact(RTag.findAllById(ids)) }) } diff --git a/modules/backend/src/main/scala/docspell/backend/ops/OUpload.scala b/modules/backend/src/main/scala/docspell/backend/ops/OUpload.scala index 9d49e65d..246cdd2d 100644 --- a/modules/backend/src/main/scala/docspell/backend/ops/OUpload.scala +++ b/modules/backend/src/main/scala/docspell/backend/ops/OUpload.scala @@ -11,9 +11,9 @@ import cats.data.{EitherT, OptionT} import cats.effect._ import cats.implicits._ import fs2.Stream - import docspell.backend.JobFactory import docspell.common._ +import docspell.scheduler.usertask.UserTaskScope import docspell.scheduler.{Job, JobStore} import docspell.store.Store import docspell.store.records._ @@ -22,7 +22,8 @@ trait OUpload[F[_]] { def submit( data: OUpload.UploadData[F], - account: AccountId, + collectiveId: CollectiveId, + userId: 
Option[Ident], itemId: Option[Ident] ): F[OUpload.UploadResult] @@ -38,12 +39,13 @@ trait OUpload[F[_]] { final def submitEither( data: OUpload.UploadData[F], - accOrSrc: Either[Ident, AccountId], + accOrSrc: Either[Ident, CollectiveId], + userId: Option[Ident], itemId: Option[Ident] ): F[OUpload.UploadResult] = accOrSrc match { case Right(acc) => - submit(data, acc, itemId) + submit(data, acc, userId, itemId) case Left(srcId) => submit(data, srcId, itemId) } @@ -90,7 +92,7 @@ object OUpload { def noFiles: UploadResult = NoFiles - /** A source (`RSource') could not be found for a given source-id. */ + /** A source (`RSource`) could not be found for a given source-id. */ case object NoSource extends UploadResult def noSource: UploadResult = NoSource @@ -99,6 +101,11 @@ object OUpload { case object NoItem extends UploadResult def noItem: UploadResult = NoItem + + /** A collective with the given id was not found */ + case object NoCollective extends UploadResult + + def noCollective: UploadResult = NoCollective } private def right[F[_]: Functor, A](a: F[A]): EitherT[F, UploadResult, A] = @@ -110,26 +117,30 @@ object OUpload { ): Resource[F, OUpload[F]] = Resource.pure[F, OUpload[F]](new OUpload[F] { private[this] val logger = docspell.logging.getLogger[F] + def submit( data: OUpload.UploadData[F], - account: AccountId, + collectiveId: CollectiveId, + userId: Option[Ident], itemId: Option[Ident] ): F[OUpload.UploadResult] = (for { - _ <- checkExistingItem(itemId, account.collective) - files <- right(data.files.traverse(saveFile(account)).map(_.flatten)) + _ <- checkExistingItem(itemId, collectiveId) + coll <- OptionT(store.transact(RCollective.findById(collectiveId))) + .toRight(UploadResult.noCollective) + files <- right(data.files.traverse(saveFile(coll.id)).map(_.flatten)) _ <- checkFileList(files) lang <- data.meta.language match { case Some(lang) => right(lang.pure[F]) case None => right( store - .transact(RCollective.findLanguage(account.collective)) + 
.transact(RCollective.findLanguage(collectiveId)) .map(_.getOrElse(Language.German)) ) } meta = ProcessItemArgs.ProcessMeta( - account.collective, + collectiveId, itemId, lang, data.meta.direction, @@ -143,12 +154,18 @@ object OUpload { data.meta.attachmentsOnly ) args = ProcessItemArgs(meta, files.toList) - jobs <- right(makeJobs(data, args, account)) + jobs <- right( + makeJobs( + data, + args, + UserTaskScope(collectiveId, userId) + ) + ) _ <- right(logger.debug(s"Storing jobs: $jobs")) res <- right(submitJobs(jobs.map(_.encode))) _ <- right( store.transact( - RSource.incrementCounter(data.meta.sourceAbbrev, account.collective) + RSource.incrementCounter(data.meta.sourceAbbrev, collectiveId) ) ) } yield res).fold(identity, identity) @@ -174,8 +191,7 @@ object OUpload { ), priority = src.source.priority ) - accId = AccountId(src.source.cid, src.source.sid) - result <- OptionT.liftF(submit(updata, accId, itemId)) + result <- OptionT.liftF(submit(updata, src.source.cid, None, itemId)) } yield result).getOrElse(UploadResult.noSource) private def submitJobs(jobs: List[Job[String]]): F[OUpload.UploadResult] = @@ -186,13 +202,13 @@ object OUpload { /** Saves the file into the database. 
*/ private def saveFile( - accountId: AccountId + collectiveId: CollectiveId )(file: File[F]): F[Option[ProcessItemArgs.File]] = logger.info(s"Receiving file $file") *> file.data .through( store.fileRepo.save( - accountId.collective, + collectiveId, FileCategory.AttachmentSource, MimeTypeHint(file.name, None) ) @@ -212,7 +228,7 @@ object OUpload { private def checkExistingItem( itemId: Option[Ident], - coll: Ident + coll: CollectiveId ): EitherT[F, UploadResult, Unit] = itemId match { case None => @@ -232,22 +248,22 @@ object OUpload { private def makeJobs( data: UploadData[F], args: ProcessItemArgs, - account: AccountId + submitter: UserTaskScope ): F[List[Job[ProcessItemArgs]]] = if (data.meta.flattenArchives.getOrElse(false)) JobFactory - .multiUpload(args, account, data.priority, data.tracker) + .multiUpload(args, submitter, data.priority, data.tracker) .map(List(_)) else if (data.multiple) JobFactory.processItems( args.files.map(f => args.copy(files = List(f))), - account, + submitter, data.priority, data.tracker ) else JobFactory - .processItem[F](args, account, data.priority, data.tracker) + .processItem[F](args, submitter, data.priority, data.tracker) .map(List(_)) }) } diff --git a/modules/backend/src/main/scala/docspell/backend/ops/search/OSearch.scala b/modules/backend/src/main/scala/docspell/backend/ops/search/OSearch.scala index 483f8fc7..29c600b0 100644 --- a/modules/backend/src/main/scala/docspell/backend/ops/search/OSearch.scala +++ b/modules/backend/src/main/scala/docspell/backend/ops/search/OSearch.scala @@ -79,7 +79,7 @@ trait OSearch[F[_]] { * `q.fix` part. 
*/ def parseQueryString( - accountId: AccountId, + accountId: AccountInfo, mode: SearchMode, qs: String ): QueryParseResult @@ -94,7 +94,7 @@ object OSearch { private[this] val logger = docspell.logging.getLogger[F] def parseQueryString( - accountId: AccountId, + accountId: AccountInfo, mode: SearchMode, qs: String ): QueryParseResult = { @@ -199,7 +199,7 @@ object OSearch { timed <- Duration.stopTime[F] resolved <- store .transact( - QItem.findItemsWithTags(q.fix.account.collective, Stream.emits(items)) + QItem.findItemsWithTags(q.fix.account.collectiveId, Stream.emits(items)) ) .compile .toVector @@ -233,13 +233,13 @@ object OSearch { } private def createFtsQuery( - account: AccountId, + account: AccountInfo, ftq: String ): F[FtsQuery] = store - .transact(QFolder.getMemberFolders(account)) + .transact(QFolder.getMemberFolders(account.collectiveId, account.userId)) .map(folders => - FtsQuery(ftq, account.collective, 500, 0) + FtsQuery(ftq, account.collectiveId, 500, 0) .withFolders(folders) ) diff --git a/modules/backend/src/main/scala/docspell/backend/signup/OSignup.scala b/modules/backend/src/main/scala/docspell/backend/signup/OSignup.scala index 92314b8c..67904766 100644 --- a/modules/backend/src/main/scala/docspell/backend/signup/OSignup.scala +++ b/modules/backend/src/main/scala/docspell/backend/signup/OSignup.scala @@ -6,14 +6,13 @@ package docspell.backend.signup +import cats.data.OptionT import cats.effect.{Async, Resource} import cats.implicits._ - import docspell.backend.PasswordCrypt import docspell.common._ import docspell.store.records.{RCollective, RInvitation, RUser} import docspell.store.{AddResult, Store} - import doobie.free.connection.ConnectionIO trait OSignup[F[_]] { @@ -48,7 +47,7 @@ object OSignup { def register(cfg: Config)(data: RegisterData): F[SignupResult] = cfg.mode match { case Config.Mode.Open => - addUser(data).map(SignupResult.fromAddResult) + addNewAccount(data, AccountSource.Local).map(SignupResult.fromAddResult) case 
Config.Mode.Closed => SignupResult.signupClosed.pure[F] @@ -61,7 +60,9 @@ object OSignup { min = now.minus(cfg.inviteTime) ok <- store.transact(RInvitation.useInvite(inv, min)) res <- - if (ok) addUser(data).map(SignupResult.fromAddResult) + if (ok) + addNewAccount(data, AccountSource.Local) + .map(SignupResult.fromAddResult) else SignupResult.invalidInvitationKey.pure[F] _ <- if (retryInvite(res)) @@ -84,26 +85,37 @@ object OSignup { SignupResult .failure(new Exception("Account source must not be LOCAL!")) .pure[F] - else - for { - recs <- makeRecords(data.collName, data.login, Password(""), data.source) - cres <- store.add( - RCollective.insert(recs._1), - RCollective.existsById(data.collName) - ) - ures <- store.add(RUser.insert(recs._2), RUser.exists(data.login)) - res = cres match { - case AddResult.Failure(ex) => - SignupResult.failure(ex) - case _ => - ures match { - case AddResult.Failure(ex) => - SignupResult.failure(ex) - case _ => - SignupResult.success - } - } - } yield res + else { + val maybeInsert: ConnectionIO[Unit] = + for { + now <- Timestamp.current[ConnectionIO] + cid <- OptionT(RCollective.findByName(data.collName)) + .map(_.id) + .getOrElseF( + RCollective.insert(RCollective.makeDefault(data.collName, now)) + ) + + uid <- Ident.randomId[ConnectionIO] + newUser = RUser.makeDefault( + uid, + data.login, + cid, + Password(""), + AccountSource.OpenId, + now + ) + _ <- OptionT(RUser.findByLogin(data.login, cid.some)) + .map(_ => 1) + .getOrElseF(RUser.insert(newUser)) + } yield () + + store.transact(maybeInsert).attempt.map { + case Left(ex) => + SignupResult.failure(ex) + case Right(_) => + SignupResult.success + } + } private def retryInvite(res: SignupResult): Boolean = res match { @@ -119,41 +131,38 @@ object OSignup { false } - private def addUser(data: RegisterData): F[AddResult] = { - def insert(coll: RCollective, user: RUser): ConnectionIO[Int] = + private def addNewAccount( + data: RegisterData, + accountSource: AccountSource + ): 
F[AddResult] = { + def insert: ConnectionIO[Int] = for { - n1 <- RCollective.insert(coll) - n2 <- RUser.insert(user) - } yield n1 + n2 + now <- Timestamp.current[ConnectionIO] + cid <- RCollective.insert(RCollective.makeDefault(data.collName, now)) + uid <- Ident.randomId[ConnectionIO] + n2 <- RUser.insert( + RUser.makeDefault( + uid, + data.login, + cid, + if (data.password.isEmpty) data.password + else PasswordCrypt.crypt(data.password), + accountSource, + now + ) + ) + } yield n2 def collectiveExists: ConnectionIO[Boolean] = - RCollective.existsById(data.collName) + RCollective.existsByName(data.collName) val msg = s"The collective '${data.collName}' already exists." for { - cu <- makeRecords(data.collName, data.login, data.password, AccountSource.Local) - save <- store.add(insert(cu._1, cu._2), collectiveExists) - } yield save.fold(identity, _.withMsg(msg), identity) + exists <- store.transact(collectiveExists) + saved <- + if (exists) AddResult.entityExists(msg).pure[F] + else store.transact(insert).attempt.map(AddResult.fromUpdate) + } yield saved } - - private def makeRecords( - collName: Ident, - login: Ident, - password: Password, - source: AccountSource - ): F[(RCollective, RUser)] = - for { - id2 <- Ident.randomId[F] - now <- Timestamp.current[F] - c = RCollective.makeDefault(collName, now) - u = RUser.makeDefault( - id2, - login, - collName, - PasswordCrypt.crypt(password), - source, - now - ) - } yield (c, u) }) } diff --git a/modules/backend/src/test/scala/docspell/backend/auth/AuthTokenTest.scala b/modules/backend/src/test/scala/docspell/backend/auth/AuthTokenTest.scala index 10aa99f1..8c6e111f 100644 --- a/modules/backend/src/test/scala/docspell/backend/auth/AuthTokenTest.scala +++ b/modules/backend/src/test/scala/docspell/backend/auth/AuthTokenTest.scala @@ -16,8 +16,18 @@ import scodec.bits.ByteVector class AuthTokenTest extends CatsEffectSuite { - val user = AccountId(Ident.unsafe("demo"), Ident.unsafe("demo")) - val john = 
AccountId(Ident.unsafe("demo"), Ident.unsafe("john")) + val user = AccountInfo( + CollectiveId(1), + Ident.unsafe("demo"), + Ident.unsafe("abc-def"), + Ident.unsafe("demo") + ) + val john = AccountInfo( + CollectiveId(1), + Ident.unsafe("demo"), + Ident.unsafe("abc-hij"), + Ident.unsafe("john") + ) val secret = ByteVector.fromValidHex("caffee") val otherSecret = ByteVector.fromValidHex("16bad") diff --git a/modules/common/src/main/scala/docspell/common/AllPreviewsArgs.scala b/modules/common/src/main/scala/docspell/common/AllPreviewsArgs.scala index a3d20162..2256346f 100644 --- a/modules/common/src/main/scala/docspell/common/AllPreviewsArgs.scala +++ b/modules/common/src/main/scala/docspell/common/AllPreviewsArgs.scala @@ -16,7 +16,7 @@ import io.circe.{Decoder, Encoder} * collective is specified, it considers all attachments. */ case class AllPreviewsArgs( - collective: Option[Ident], + collective: Option[CollectiveId], storeMode: MakePreviewArgs.StoreMode ) diff --git a/modules/common/src/main/scala/docspell/common/CollectiveId.scala b/modules/common/src/main/scala/docspell/common/CollectiveId.scala index 4a8fdfe4..ce6a849b 100644 --- a/modules/common/src/main/scala/docspell/common/CollectiveId.scala +++ b/modules/common/src/main/scala/docspell/common/CollectiveId.scala @@ -10,6 +10,12 @@ import io.circe.{Decoder, Encoder} final class CollectiveId(val value: Long) extends AnyVal { + def valueAsString: String = + value.toString + + def valueAsIdent: Ident = + Ident.unsafe(valueAsString) + override def toString = s"CollectiveId($value)" } @@ -19,6 +25,12 @@ object CollectiveId { def apply(n: Long): CollectiveId = new CollectiveId(n) + def fromString(str: String): Either[String, CollectiveId] = + str.trim.toLongOption.map(CollectiveId(_)).toRight(s"Invalid collective id: $str") + + def unsafeFromString(str: String): CollectiveId = + fromString(str).fold(sys.error, identity) + implicit val jsonEncoder: Encoder[CollectiveId] = Encoder.encodeLong.contramap(_.value) 
implicit val jsonDecoder: Decoder[CollectiveId] = diff --git a/modules/common/src/main/scala/docspell/common/ConvertAllPdfArgs.scala b/modules/common/src/main/scala/docspell/common/ConvertAllPdfArgs.scala index 00a4319b..644fd412 100644 --- a/modules/common/src/main/scala/docspell/common/ConvertAllPdfArgs.scala +++ b/modules/common/src/main/scala/docspell/common/ConvertAllPdfArgs.scala @@ -16,7 +16,7 @@ import io.circe.generic.semiauto._ * submitted by this task run in the realm of the collective (and only their files are * considered). If it is empty, it is a system task and all files are considered. */ -case class ConvertAllPdfArgs(collective: Option[Ident]) +case class ConvertAllPdfArgs(collective: Option[CollectiveId]) object ConvertAllPdfArgs { diff --git a/modules/common/src/main/scala/docspell/common/EmptyTrashArgs.scala b/modules/common/src/main/scala/docspell/common/EmptyTrashArgs.scala index 8e76d2d5..0fb47266 100644 --- a/modules/common/src/main/scala/docspell/common/EmptyTrashArgs.scala +++ b/modules/common/src/main/scala/docspell/common/EmptyTrashArgs.scala @@ -18,7 +18,7 @@ import io.circe.generic.semiauto._ * with state `ItemState.Deleted`. 
*/ case class EmptyTrashArgs( - collective: Ident, + collective: CollectiveId, minAge: Duration ) { @@ -35,8 +35,8 @@ object EmptyTrashArgs { val defaultSchedule = CalEvent.unsafe("*-*-1/7 03:00:00 UTC") - def periodicTaskId(coll: Ident): Ident = - Ident.unsafe(s"docspell") / taskName / coll + def periodicTaskId(coll: CollectiveId): Ident = + Ident.unsafe(s"docspell") / taskName / coll.value implicit val jsonEncoder: Encoder[EmptyTrashArgs] = deriveEncoder[EmptyTrashArgs] @@ -45,5 +45,4 @@ object EmptyTrashArgs { def parse(str: String): Either[Throwable, EmptyTrashArgs] = str.parseJsonAs[EmptyTrashArgs] - } diff --git a/modules/common/src/main/scala/docspell/common/FileCategory.scala b/modules/common/src/main/scala/docspell/common/FileCategory.scala index 21caa25c..018b1096 100644 --- a/modules/common/src/main/scala/docspell/common/FileCategory.scala +++ b/modules/common/src/main/scala/docspell/common/FileCategory.scala @@ -20,7 +20,7 @@ sealed trait FileCategory { self: Product => final def id: Ident = Ident.unsafe(self.productPrefix.toLowerCase) - def toFileKey(collective: Ident, fileId: Ident): FileKey = + def toFileKey(collective: CollectiveId, fileId: Ident): FileKey = common.FileKey(collective, this, fileId) } diff --git a/modules/common/src/main/scala/docspell/common/FileKey.scala b/modules/common/src/main/scala/docspell/common/FileKey.scala index 46eb8409..ab9947f2 100644 --- a/modules/common/src/main/scala/docspell/common/FileKey.scala +++ b/modules/common/src/main/scala/docspell/common/FileKey.scala @@ -9,9 +9,9 @@ package docspell.common import io.circe.generic.semiauto.{deriveDecoder, deriveEncoder} import io.circe.{Decoder, Encoder} -final case class FileKey(collective: Ident, category: FileCategory, id: Ident) { +final case class FileKey(collective: CollectiveId, category: FileCategory, id: Ident) { override def toString = - s"${collective.id}/${category.id.id}/${id.id}" + s"${collective.value}/${category.id.id}/${id.id}" } object FileKey { diff --git 
a/modules/common/src/main/scala/docspell/common/Ident.scala b/modules/common/src/main/scala/docspell/common/Ident.scala index a0737022..9981fe8a 100644 --- a/modules/common/src/main/scala/docspell/common/Ident.scala +++ b/modules/common/src/main/scala/docspell/common/Ident.scala @@ -27,6 +27,9 @@ case class Ident(id: String) { def /(next: Ident): Ident = new Ident(id + Ident.concatChar + next.id) + def /(next: Number): Ident = + new Ident(id + Ident.concatChar + next) + def take(n: Int): Ident = new Ident(id.take(n)) } diff --git a/modules/common/src/main/scala/docspell/common/ItemAddonTaskArgs.scala b/modules/common/src/main/scala/docspell/common/ItemAddonTaskArgs.scala index 3333486d..128a7a9b 100644 --- a/modules/common/src/main/scala/docspell/common/ItemAddonTaskArgs.scala +++ b/modules/common/src/main/scala/docspell/common/ItemAddonTaskArgs.scala @@ -15,7 +15,7 @@ import io.circe.{Decoder, Encoder} * tasks that are configured for 'existing-item' are run. */ final case class ItemAddonTaskArgs( - collective: Ident, + collective: CollectiveId, itemId: Ident, addonRunConfigs: Set[Ident] ) diff --git a/modules/common/src/main/scala/docspell/common/LearnClassifierArgs.scala b/modules/common/src/main/scala/docspell/common/LearnClassifierArgs.scala index 32eeca8c..7e36f32d 100644 --- a/modules/common/src/main/scala/docspell/common/LearnClassifierArgs.scala +++ b/modules/common/src/main/scala/docspell/common/LearnClassifierArgs.scala @@ -18,7 +18,7 @@ import io.circe.generic.semiauto._ * possible tags.. 
*/ case class LearnClassifierArgs( - collective: Ident + collectiveId: CollectiveId ) { def makeSubject: String = @@ -37,5 +37,4 @@ object LearnClassifierArgs { def parse(str: String): Either[Throwable, LearnClassifierArgs] = str.parseJsonAs[LearnClassifierArgs] - } diff --git a/modules/common/src/main/scala/docspell/common/ProcessItemArgs.scala b/modules/common/src/main/scala/docspell/common/ProcessItemArgs.scala index 9a92cee7..5c830158 100644 --- a/modules/common/src/main/scala/docspell/common/ProcessItemArgs.scala +++ b/modules/common/src/main/scala/docspell/common/ProcessItemArgs.scala @@ -17,7 +17,7 @@ import io.circe.generic.semiauto._ * This task is run for each new file to create a new item from it or to add this file as * an attachment to an existing item. * - * If the `itemId' is set to some value, the item is tried to load to amend with the + * If the `itemId` is set to some value, the item is tried to load to amend with the * given files. Otherwise a new item is created. * * It is also re-used by the 'ReProcessItem' task. 
@@ -43,7 +43,7 @@ object ProcessItemArgs { val multiUploadTaskName = Ident.unsafe("multi-upload-process") case class ProcessMeta( - collective: Ident, + collective: CollectiveId, itemId: Option[Ident], language: Language, direction: Option[Direction], @@ -73,5 +73,4 @@ object ProcessItemArgs { def parse(str: String): Either[Throwable, ProcessItemArgs] = str.parseJsonAs[ProcessItemArgs] - } diff --git a/modules/common/src/main/scala/docspell/common/ReIndexTaskArgs.scala b/modules/common/src/main/scala/docspell/common/ReIndexTaskArgs.scala index cd4c1ca6..8e3326ad 100644 --- a/modules/common/src/main/scala/docspell/common/ReIndexTaskArgs.scala +++ b/modules/common/src/main/scala/docspell/common/ReIndexTaskArgs.scala @@ -9,7 +9,7 @@ package docspell.common import io.circe._ import io.circe.generic.semiauto._ -final case class ReIndexTaskArgs(collective: Option[Ident]) +final case class ReIndexTaskArgs(collective: Option[CollectiveId]) object ReIndexTaskArgs { val taskName = Ident.unsafe("full-text-reindex") @@ -17,7 +17,7 @@ object ReIndexTaskArgs { def tracker(args: ReIndexTaskArgs): Ident = args.collective match { case Some(cid) => - cid / DocspellSystem.migrationTaskTracker + cid.valueAsIdent / DocspellSystem.migrationTaskTracker case None => DocspellSystem.migrationTaskTracker } diff --git a/modules/common/src/main/scala/docspell/common/ScheduledAddonTaskArgs.scala b/modules/common/src/main/scala/docspell/common/ScheduledAddonTaskArgs.scala index 5104108e..5ce1bca0 100644 --- a/modules/common/src/main/scala/docspell/common/ScheduledAddonTaskArgs.scala +++ b/modules/common/src/main/scala/docspell/common/ScheduledAddonTaskArgs.scala @@ -9,7 +9,7 @@ package docspell.common import io.circe.generic.semiauto.{deriveDecoder, deriveEncoder} import io.circe.{Decoder, Encoder} -final case class ScheduledAddonTaskArgs(collective: Ident, addonTaskId: Ident) +final case class ScheduledAddonTaskArgs(collective: CollectiveId, addonTaskId: Ident) object ScheduledAddonTaskArgs { 
val taskName: Ident = Ident.unsafe("addon-scheduled-task") diff --git a/modules/common/src/main/scala/docspell/common/bc/BackendCommandRunner.scala b/modules/common/src/main/scala/docspell/common/bc/BackendCommandRunner.scala index 863e35ed..0161a57a 100644 --- a/modules/common/src/main/scala/docspell/common/bc/BackendCommandRunner.scala +++ b/modules/common/src/main/scala/docspell/common/bc/BackendCommandRunner.scala @@ -6,12 +6,12 @@ package docspell.common.bc -import docspell.common.Ident +import docspell.common.CollectiveId trait BackendCommandRunner[F[_], A] { - def run(collective: Ident, cmd: BackendCommand): F[A] + def run(collective: CollectiveId, cmd: BackendCommand): F[A] - def runAll(collective: Ident, cmds: List[BackendCommand]): F[A] + def runAll(collective: CollectiveId, cmds: List[BackendCommand]): F[A] } diff --git a/modules/fts-client/src/main/scala/docspell/ftsclient/FtsClient.scala b/modules/fts-client/src/main/scala/docspell/ftsclient/FtsClient.scala index 920ce450..400eefe7 100644 --- a/modules/fts-client/src/main/scala/docspell/ftsclient/FtsClient.scala +++ b/modules/fts-client/src/main/scala/docspell/ftsclient/FtsClient.scala @@ -67,7 +67,7 @@ trait FtsClient[F[_]] { def updateItemName( logger: Logger[F], itemId: Ident, - collective: Ident, + collective: CollectiveId, language: Language, name: String ): F[Unit] = @@ -79,7 +79,7 @@ trait FtsClient[F[_]] { def updateItemNotes( logger: Logger[F], itemId: Ident, - collective: Ident, + collective: CollectiveId, language: Language, notes: Option[String] ): F[Unit] = @@ -92,7 +92,7 @@ trait FtsClient[F[_]] { logger: Logger[F], itemId: Ident, attachId: Ident, - collective: Ident, + collective: CollectiveId, language: Language, name: Option[String] ): F[Unit] = @@ -112,7 +112,7 @@ trait FtsClient[F[_]] { def updateFolder( logger: Logger[F], itemId: Ident, - collective: Ident, + collective: CollectiveId, folder: Option[Ident] ): F[Unit] @@ -124,7 +124,7 @@ trait FtsClient[F[_]] { def clearAll(logger: 
Logger[F]): F[Unit] /** Clears the index from all data belonging to the given collective. */ - def clear(logger: Logger[F], collective: Ident): F[Unit] + def clear(logger: Logger[F], collective: CollectiveId): F[Unit] } @@ -149,7 +149,7 @@ object FtsClient { def updateFolder( logger: Logger[F], itemId: Ident, - collective: Ident, + collective: CollectiveId, folder: Option[Ident] ): F[Unit] = logger.warn("Full-text search is disabled!") @@ -166,7 +166,7 @@ object FtsClient { def clearAll(logger: Logger[F]): F[Unit] = logger.warn("Full-text search is disabled!") - def clear(logger: Logger[F], collective: Ident): F[Unit] = + def clear(logger: Logger[F], collective: CollectiveId): F[Unit] = logger.warn("Full-text search is disabled!") } } diff --git a/modules/fts-client/src/main/scala/docspell/ftsclient/FtsQuery.scala b/modules/fts-client/src/main/scala/docspell/ftsclient/FtsQuery.scala index a7aa414a..3183a742 100644 --- a/modules/fts-client/src/main/scala/docspell/ftsclient/FtsQuery.scala +++ b/modules/fts-client/src/main/scala/docspell/ftsclient/FtsQuery.scala @@ -21,7 +21,7 @@ import docspell.common._ */ final case class FtsQuery( q: String, - collective: Ident, + collective: CollectiveId, items: Set[Ident], folders: Set[Ident], limit: Int, @@ -37,7 +37,7 @@ final case class FtsQuery( } object FtsQuery { - def apply(q: String, collective: Ident, limit: Int, offset: Int): FtsQuery = + def apply(q: String, collective: CollectiveId, limit: Int, offset: Int): FtsQuery = FtsQuery(q, collective, Set.empty, Set.empty, limit, offset, HighlightSetting.default) case class HighlightSetting(pre: String, post: String) diff --git a/modules/fts-client/src/main/scala/docspell/ftsclient/TextData.scala b/modules/fts-client/src/main/scala/docspell/ftsclient/TextData.scala index 90da688a..b6f934c8 100644 --- a/modules/fts-client/src/main/scala/docspell/ftsclient/TextData.scala +++ b/modules/fts-client/src/main/scala/docspell/ftsclient/TextData.scala @@ -14,7 +14,7 @@ sealed trait 
TextData { def item: Ident - def collective: Ident + def collective: CollectiveId def folder: Option[Ident] @@ -32,7 +32,7 @@ object TextData { final case class Attachment( item: Ident, attachId: Ident, - collective: Ident, + collective: CollectiveId, folder: Option[Ident], language: Language, name: Option[String], @@ -46,7 +46,7 @@ object TextData { def attachment( item: Ident, attachId: Ident, - collective: Ident, + collective: CollectiveId, folder: Option[Ident], lang: Language, name: Option[String], @@ -56,7 +56,7 @@ object TextData { final case class Item( item: Ident, - collective: Ident, + collective: CollectiveId, folder: Option[Ident], name: Option[String], notes: Option[String], @@ -69,7 +69,7 @@ object TextData { def item( item: Ident, - collective: Ident, + collective: CollectiveId, folder: Option[Ident], name: Option[String], notes: Option[String], diff --git a/modules/fts-psql/src/main/resources/db/psqlfts/V2.1.0__collective_id.sql b/modules/fts-psql/src/main/resources/db/psqlfts/V2.1.0__collective_id.sql new file mode 100644 index 00000000..db11a8d9 --- /dev/null +++ b/modules/fts-psql/src/main/resources/db/psqlfts/V2.1.0__collective_id.sql @@ -0,0 +1,33 @@ +drop index "ftspsql_search_ftsidx"; +drop index "ftpsql_search_item_idx"; +drop index "ftpsql_search_attach_idx"; +drop index "ftpsql_search_folder_idx"; +drop table "ftspsql_search" cascade; + +create table "ftspsql_search"( + "id" varchar(254) not null primary key, + "item_id" varchar(254) not null, + "collective" bigint not null, + "lang" varchar(254) not null, + "attach_id" varchar(254), + "folder_id" varchar(254), + "updated_at" timestamptz not null default current_timestamp, + --- content columns + "attach_name" text, + "attach_content" text, + "item_name" text, + "item_notes" text, + --- index column + "fts_config" regconfig not null, + "text_index" tsvector + generated always as ( + setweight(to_tsvector("fts_config", coalesce("attach_name", '')), 'B') || + 
setweight(to_tsvector("fts_config", coalesce("item_name", '')), 'B') || + setweight(to_tsvector("fts_config", coalesce("attach_content", '')), 'C') || + setweight(to_tsvector("fts_config", coalesce("item_notes", '')), 'C')) stored +); + +create index "ftspsql_search_ftsidx" on "ftspsql_search" using GIN ("text_index"); +create index "ftpsql_search_item_idx" on "ftspsql_search"("item_id"); +create index "ftpsql_search_attach_idx" on "ftspsql_search"("attach_id"); +create index "ftpsql_search_folder_idx" on "ftspsql_search"("folder_id"); diff --git a/modules/fts-psql/src/main/scala/docspell/ftspsql/DoobieMeta.scala b/modules/fts-psql/src/main/scala/docspell/ftspsql/DoobieMeta.scala index 1a537ae5..34a30594 100644 --- a/modules/fts-psql/src/main/scala/docspell/ftspsql/DoobieMeta.scala +++ b/modules/fts-psql/src/main/scala/docspell/ftspsql/DoobieMeta.scala @@ -26,6 +26,8 @@ trait DoobieMeta { implicit val metaLanguage: Meta[Language] = Meta[String].timap(Language.unsafe)(_.iso3) + implicit val metaCollectiveId: Meta[CollectiveId] = + Meta[Long].timap(CollectiveId(_))(_.value) } object DoobieMeta { diff --git a/modules/fts-psql/src/main/scala/docspell/ftspsql/FtsRecord.scala b/modules/fts-psql/src/main/scala/docspell/ftspsql/FtsRecord.scala index f868b273..d760151c 100644 --- a/modules/fts-psql/src/main/scala/docspell/ftspsql/FtsRecord.scala +++ b/modules/fts-psql/src/main/scala/docspell/ftspsql/FtsRecord.scala @@ -7,14 +7,13 @@ package docspell.ftspsql import cats.syntax.all._ - -import docspell.common.{Ident, Language} +import docspell.common.{CollectiveId, Ident, Language} import docspell.ftsclient.TextData final case class FtsRecord( id: Ident, itemId: Ident, - collective: Ident, + collective: CollectiveId, language: Language, attachId: Option[Ident], folderId: Option[Ident], diff --git a/modules/fts-psql/src/main/scala/docspell/ftspsql/FtsRepository.scala b/modules/fts-psql/src/main/scala/docspell/ftspsql/FtsRepository.scala index cbc5053b..55d51c19 100644 --- 
a/modules/fts-psql/src/main/scala/docspell/ftspsql/FtsRepository.scala +++ b/modules/fts-psql/src/main/scala/docspell/ftspsql/FtsRepository.scala @@ -18,6 +18,12 @@ import doobie.implicits._ object FtsRepository extends DoobieMeta { val table = fr"ftspsql_search" + def containsData: ConnectionIO[Boolean] = + sql"select id from $table limit 1".query[String].option.map(_.isDefined) + + def containsNoData: ConnectionIO[Boolean] = + containsData.map(!_) + def searchSummary(pq: PgQueryParser, rn: RankNormalization)( q: FtsQuery ): ConnectionIO[SearchSummary] = { @@ -139,7 +145,7 @@ object FtsRepository extends DoobieMeta { def updateFolder( itemId: Ident, - collective: Ident, + collective: CollectiveId, folder: Option[Ident] ): ConnectionIO[Int] = (sql"UPDATE $table" ++ @@ -155,7 +161,7 @@ object FtsRepository extends DoobieMeta { def deleteAll: ConnectionIO[Int] = sql"DELETE FROM $table".update.run - def delete(collective: Ident): ConnectionIO[Int] = + def delete(collective: CollectiveId): ConnectionIO[Int] = sql"DELETE FROM $table WHERE collective = $collective".update.run def resetAll: ConnectionIO[Int] = { diff --git a/modules/fts-psql/src/main/scala/docspell/ftspsql/PsqlFtsClient.scala b/modules/fts-psql/src/main/scala/docspell/ftspsql/PsqlFtsClient.scala index fefa3e55..f0dc64ea 100644 --- a/modules/fts-psql/src/main/scala/docspell/ftspsql/PsqlFtsClient.scala +++ b/modules/fts-psql/src/main/scala/docspell/ftspsql/PsqlFtsClient.scala @@ -46,6 +46,16 @@ final class PsqlFtsClient[F[_]: Sync](cfg: PsqlConfig, xa: Transactor[F]) engine, "initialize", DbMigration[F](cfg).run.as(FtsMigration.Result.WorkDone) + ), + FtsMigration( + 1, + engine, + "Re-Index if empty", + FtsRepository.containsNoData + .transact(xa) + .map(empty => + if (empty) FtsMigration.Result.IndexAll else FtsMigration.Result.WorkDone + ) ) ) ) @@ -104,11 +114,11 @@ final class PsqlFtsClient[F[_]: Sync](cfg: PsqlConfig, xa: Transactor[F]) def updateFolder( logger: Logger[F], itemId: Ident, - collective: 
Ident, + collective: CollectiveId, folder: Option[Ident] ): F[Unit] = logger.debug(s"Update folder '${folder - .map(_.id)}' in fts for collective ${collective.id} and item ${itemId.id}") *> + .map(_.id)}' in fts for collective ${collective.value} and item ${itemId.id}") *> FtsRepository.updateFolder(itemId, collective, folder).transact(xa).void def removeItem(logger: Logger[F], itemId: Ident): F[Unit] = @@ -123,8 +133,8 @@ final class PsqlFtsClient[F[_]: Sync](cfg: PsqlConfig, xa: Transactor[F]) logger.info(s"Deleting complete FTS index") *> FtsRepository.deleteAll.transact(xa).void - def clear(logger: Logger[F], collective: Ident): F[Unit] = - logger.info(s"Deleting index for collective ${collective.id}") *> + def clear(logger: Logger[F], collective: CollectiveId): F[Unit] = + logger.info(s"Deleting index for collective ${collective.value}") *> FtsRepository.delete(collective).transact(xa).void } diff --git a/modules/fts-psql/src/test/scala/docspell/ftspsql/PgFixtures.scala b/modules/fts-psql/src/test/scala/docspell/ftspsql/PgFixtures.scala index acbbe9e1..fe804f28 100644 --- a/modules/fts-psql/src/test/scala/docspell/ftspsql/PgFixtures.scala +++ b/modules/fts-psql/src/test/scala/docspell/ftspsql/PgFixtures.scala @@ -51,8 +51,8 @@ trait PgFixtures { self.transact(client.transactor) } - val collective1 = ident("coll1") - val collective2 = ident("coll2") + val collective1 = CollectiveId(1) + val collective2 = CollectiveId(2) val itemData: TextData.Item = TextData.Item( diff --git a/modules/fts-solr/src/main/scala/docspell/ftssolr/JsonCodec.scala b/modules/fts-solr/src/main/scala/docspell/ftssolr/JsonCodec.scala index 0d972749..e4684fe8 100644 --- a/modules/fts-solr/src/main/scala/docspell/ftssolr/JsonCodec.scala +++ b/modules/fts-solr/src/main/scala/docspell/ftssolr/JsonCodec.scala @@ -17,7 +17,8 @@ import io.circe.syntax._ trait JsonCodec { implicit def attachmentEncoder(implicit - enc: Encoder[Ident] + enc: Encoder[Ident], + encCid: Encoder[CollectiveId] ): 
Encoder[TextData.Attachment] = new Encoder[TextData.Attachment] { final def apply(td: TextData.Attachment): Json = { @@ -28,7 +29,7 @@ trait JsonCodec { cnt :: List( (Field.id.name, enc(td.id)), (Field.itemId.name, enc(td.item)), - (Field.collectiveId.name, enc(td.collective)), + (Field.collectiveId.name, encCid(td.collective)), (Field.folderId.name, td.folder.getOrElse(Ident.unsafe("")).asJson), (Field.attachmentId.name, enc(td.attachId)), (Field.attachmentName.name, Json.fromString(td.name.getOrElse(""))), @@ -39,13 +40,16 @@ trait JsonCodec { } } - implicit def itemEncoder(implicit enc: Encoder[Ident]): Encoder[TextData.Item] = + implicit def itemEncoder(implicit + enc: Encoder[Ident], + encCid: Encoder[CollectiveId] + ): Encoder[TextData.Item] = new Encoder[TextData.Item] { final def apply(td: TextData.Item): Json = Json.obj( (Field.id.name, enc(td.id)), (Field.itemId.name, enc(td.item)), - (Field.collectiveId.name, enc(td.collective)), + (Field.collectiveId.name, encCid(td.collective)), (Field.folderId.name, td.folder.getOrElse(Ident.unsafe("")).asJson), (Field.itemName.name, Json.fromString(td.name.getOrElse(""))), (Field.itemNotes.name, Json.fromString(td.notes.getOrElse(""))), diff --git a/modules/fts-solr/src/main/scala/docspell/ftssolr/QueryData.scala b/modules/fts-solr/src/main/scala/docspell/ftssolr/QueryData.scala index ab618070..03d64540 100644 --- a/modules/fts-solr/src/main/scala/docspell/ftssolr/QueryData.scala +++ b/modules/fts-solr/src/main/scala/docspell/ftssolr/QueryData.scala @@ -51,7 +51,7 @@ object QueryData { val items = fq.items.map(_.id).mkString(" ") val folders = fq.folders.map(_.id).mkString(" ") val filterQ = List( - s"""${Field.collectiveId.name}:"${fq.collective.id}"""", + s"""${Field.collectiveId.name}:"${fq.collective.value}"""", fq.items match { case s if s.isEmpty => "" diff --git a/modules/fts-solr/src/main/scala/docspell/ftssolr/SolrFtsClient.scala b/modules/fts-solr/src/main/scala/docspell/ftssolr/SolrFtsClient.scala index 
16f7bd13..97748aba 100644 --- a/modules/fts-solr/src/main/scala/docspell/ftssolr/SolrFtsClient.scala +++ b/modules/fts-solr/src/main/scala/docspell/ftssolr/SolrFtsClient.scala @@ -41,11 +41,11 @@ final class SolrFtsClient[F[_]: Async]( def updateFolder( logger: Logger[F], itemId: Ident, - collective: Ident, + collective: CollectiveId, folder: Option[Ident] ): F[Unit] = logger.debug( - s"Update folder in solr index for coll/item ${collective.id}/${itemId.id}" + s"Update folder in solr index for coll/item ${collective.value}/${itemId.id}" ) *> solrUpdate.updateFolder(itemId, collective, folder) @@ -75,9 +75,9 @@ final class SolrFtsClient[F[_]: Async]( logger.info("Deleting complete full-text index!") *> solrUpdate.delete("*:*", Option(0)) - def clear(logger: Logger[F], collective: Ident): F[Unit] = - logger.info(s"Deleting full-text index for collective ${collective.id}") *> - solrUpdate.delete(s"${Field.collectiveId.name}:${collective.id}", Option(0)) + def clear(logger: Logger[F], collective: CollectiveId): F[Unit] = + logger.info(s"Deleting full-text index for collective ${collective.value}") *> + solrUpdate.delete(s"${Field.collectiveId.name}:${collective.value}", Option(0)) } object SolrFtsClient { diff --git a/modules/fts-solr/src/main/scala/docspell/ftssolr/SolrSetup.scala b/modules/fts-solr/src/main/scala/docspell/ftssolr/SolrSetup.scala index e0dde5a6..0149b457 100644 --- a/modules/fts-solr/src/main/scala/docspell/ftssolr/SolrSetup.scala +++ b/modules/fts-solr/src/main/scala/docspell/ftssolr/SolrSetup.scala @@ -155,7 +155,8 @@ object SolrSetup { "Add polish", addContentField(Language.Polish) ), - SolrMigration.reIndexAll(26, "Re-Index after adding polish content field") + SolrMigration.reIndexAll(26, "Re-Index after adding polish content field"), + SolrMigration.reIndexAll(27, "Re-Index after collective-id change") ) def addFolderField: F[Unit] = diff --git a/modules/fts-solr/src/main/scala/docspell/ftssolr/SolrUpdate.scala 
b/modules/fts-solr/src/main/scala/docspell/ftssolr/SolrUpdate.scala index 336aa473..033538c0 100644 --- a/modules/fts-solr/src/main/scala/docspell/ftssolr/SolrUpdate.scala +++ b/modules/fts-solr/src/main/scala/docspell/ftssolr/SolrUpdate.scala @@ -27,7 +27,11 @@ trait SolrUpdate[F[_]] { def update(tds: List[TextData]): F[Unit] - def updateFolder(itemId: Ident, collective: Ident, folder: Option[Ident]): F[Unit] + def updateFolder( + itemId: Ident, + collective: CollectiveId, + folder: Option[Ident] + ): F[Unit] def updateVersionDoc(doc: VersionDoc): F[Unit] @@ -63,13 +67,13 @@ object SolrUpdate { def updateFolder( itemId: Ident, - collective: Ident, + collective: CollectiveId, folder: Option[Ident] ): F[Unit] = { val queryUrl = Uri.unsafeFromString(cfg.url.asString) / "query" val q = QueryData( "*:*", - s"${Field.itemId.name}:${itemId.id} AND ${Field.collectiveId.name}:${collective.id}", + s"${Field.itemId.name}:${itemId.id} AND ${Field.collectiveId.name}:${collective.value}", Int.MaxValue, 0, List(Field.id), @@ -97,13 +101,12 @@ object SolrUpdate { client.expect[Unit](req) } - private val minOneChange: TextData => Boolean = - _ match { - case td: TextData.Attachment => - td.name.isDefined || td.text.isDefined - case td: TextData.Item => - td.name.isDefined || td.notes.isDefined - } + private val minOneChange: TextData => Boolean = { + case td: TextData.Attachment => + td.name.isDefined || td.text.isDefined + case td: TextData.Item => + td.name.isDefined || td.notes.isDefined + } } } diff --git a/modules/joex/src/main/scala/docspell/joex/JoexAppImpl.scala b/modules/joex/src/main/scala/docspell/joex/JoexAppImpl.scala index e3a679a8..2a6669ee 100644 --- a/modules/joex/src/main/scala/docspell/joex/JoexAppImpl.scala +++ b/modules/joex/src/main/scala/docspell/joex/JoexAppImpl.scala @@ -9,8 +9,8 @@ package docspell.joex import cats.effect._ import cats.implicits._ import fs2.concurrent.SignallingRef - import docspell.backend.MailAddressCodec +import 
docspell.backend.joex.FindJobOwnerAccount import docspell.backend.ops._ import docspell.common._ import docspell.joex.emptytrash._ @@ -27,7 +27,6 @@ import docspell.scheduler.impl.{JobStoreModuleBuilder, SchedulerModuleBuilder} import docspell.scheduler.usertask.{UserTaskScope, UserTaskStore} import docspell.store.Store import docspell.store.records.{REmptyTrashSetting, RJobLog} - import emil.javamail._ import org.http4s.client.Client @@ -117,6 +116,7 @@ object JoexAppImpl extends MailAddressCodec { jobStoreModule = JobStoreModuleBuilder(store) .withPubsub(pubSubT) .withEventSink(notificationMod) + .withFindJobOwner(FindJobOwnerAccount(store)) .build tasks <- JoexTasks.resource( diff --git a/modules/restserver/src/main/scala/docspell/restserver/RestAppImpl.scala b/modules/restserver/src/main/scala/docspell/restserver/RestAppImpl.scala index 45f58c2e..1e1e9e8e 100644 --- a/modules/restserver/src/main/scala/docspell/restserver/RestAppImpl.scala +++ b/modules/restserver/src/main/scala/docspell/restserver/RestAppImpl.scala @@ -9,9 +9,9 @@ package docspell.restserver import cats.effect._ import fs2.Stream import fs2.concurrent.Topic - import docspell.backend.BackendApp import docspell.backend.auth.{AuthToken, ShareToken} +import docspell.backend.joex.FindJobOwnerAccount import docspell.common.Pools import docspell.config.FtsType import docspell.ftsclient.FtsClient @@ -28,7 +28,6 @@ import docspell.restserver.webapp.{TemplateRoutes, Templates, WebjarRoutes} import docspell.restserver.ws.{OutputEvent, WebSocketRoutes} import docspell.scheduler.impl.JobStoreModuleBuilder import docspell.store.Store - import emil.javamail.JavaMailEmil import org.http4s.HttpRoutes import org.http4s.client.Client @@ -181,6 +180,7 @@ object RestAppImpl { schedulerMod = JobStoreModuleBuilder(store) .withPubsub(pubSubT) .withEventSink(notificationMod) + .withFindJobOwner(FindJobOwnerAccount[F](store)) .build backend <- BackendApp .create[F]( diff --git 
a/modules/scheduler/api/src/main/scala/docspell/scheduler/FindJobOwner.scala b/modules/scheduler/api/src/main/scala/docspell/scheduler/FindJobOwner.scala index 66578f53..ad2afaf7 100644 --- a/modules/scheduler/api/src/main/scala/docspell/scheduler/FindJobOwner.scala +++ b/modules/scheduler/api/src/main/scala/docspell/scheduler/FindJobOwner.scala @@ -1,6 +1,7 @@ package docspell.scheduler import cats.Applicative +import cats.data.{Kleisli, OptionT} import docspell.common.AccountInfo /** Strategy to find the user that submitted the job. This is used to emit events about @@ -8,12 +9,18 @@ import docspell.common.AccountInfo * * If an account cannot be determined, no events can be send. */ -trait FindJobOwner[F[_]] { +trait FindJobOwner[F[_]] { self => def apply(job: Job[_]): F[Option[AccountInfo]] + + final def kleisli: Kleisli[OptionT[F, *], Job[_], AccountInfo] = + Kleisli(job => OptionT(self(job))) } object FindJobOwner { def none[F[_]: Applicative]: FindJobOwner[F] = (_: Job[_]) => Applicative[F].pure(None) + + def of[F[_]](f: Job[_] => F[Option[AccountInfo]]): FindJobOwner[F] = + (job: Job[_]) => f(job) } diff --git a/modules/scheduler/api/src/main/scala/docspell/scheduler/Job.scala b/modules/scheduler/api/src/main/scala/docspell/scheduler/Job.scala index a6f4fc82..1050645f 100644 --- a/modules/scheduler/api/src/main/scala/docspell/scheduler/Job.scala +++ b/modules/scheduler/api/src/main/scala/docspell/scheduler/Job.scala @@ -8,9 +8,8 @@ package docspell.scheduler import cats.effect.Sync import cats.syntax.functor._ - import docspell.common._ - +import docspell.scheduler.usertask.UserTaskScope import io.circe.Encoder final case class Job[A]( @@ -31,14 +30,14 @@ final case class Job[A]( object Job { def createNew[F[_]: Sync, A]( task: Ident, - group: Ident, + submitter: UserTaskScope, args: A, subject: String, - submitter: Ident, priority: Priority, tracker: Option[Ident] ): F[Job[A]] = Ident.randomId[F].map { id => - Job(id, task, group, args, subject, submitter, 
priority, tracker) + val accId = submitter.toAccountId + Job(id, task, accId.collective, args, subject, accId.user, priority, tracker) } } diff --git a/modules/scheduler/api/src/main/scala/docspell/scheduler/usertask/UserTaskScope.scala b/modules/scheduler/api/src/main/scala/docspell/scheduler/usertask/UserTaskScope.scala index bd3027fe..8ca920db 100644 --- a/modules/scheduler/api/src/main/scala/docspell/scheduler/usertask/UserTaskScope.scala +++ b/modules/scheduler/api/src/main/scala/docspell/scheduler/usertask/UserTaskScope.scala @@ -7,52 +7,69 @@ package docspell.scheduler.usertask import docspell.common._ +import docspell.scheduler.usertask.UserTaskScope._ sealed trait UserTaskScope { self: Product => def name: String = productPrefix.toLowerCase - def collective: Ident + def collectiveId: Option[CollectiveId] - def fold[A](fa: AccountId => A, fb: Ident => A): A + def fold[A](fa: Account => A, fb: CollectiveId => A, fc: => A): A /** Maps to the account or uses the collective for both parts if the scope is collective * wide. 
*/ - private[scheduler] def toAccountId: AccountId = - AccountId(collective, fold(_.user, identity)) + protected[scheduler] def toAccountId: AccountId } object UserTaskScope { - final case class Account(account: AccountId) extends UserTaskScope { - val collective = account.collective + final case class Account(collective: CollectiveId, userId: Ident) + extends UserTaskScope { + val collectiveId = Some(collective) - def fold[A](fa: AccountId => A, fb: Ident => A): A = - fa(account) + def fold[A](fa: Account => A, fb: CollectiveId => A, fc: => A): A = + fa(this) + + protected[scheduler] val toAccountId: AccountId = + AccountId(collective.valueAsIdent, userId) } - final case class Collective(collective: Ident) extends UserTaskScope { - def fold[A](fa: AccountId => A, fb: Ident => A): A = + final case class Collective(collective: CollectiveId) extends UserTaskScope { + val collectiveId = Some(collective) + def fold[A](fa: Account => A, fb: CollectiveId => A, fc: => A): A = fb(collective) + + protected[scheduler] val toAccountId: AccountId = { + val c = collective.valueAsIdent + AccountId(c, c) + } } - def collective(id: Ident): UserTaskScope = + case object System extends UserTaskScope { + val collectiveId = None + + def fold[A](fa: Account => A, fb: CollectiveId => A, fc: => A): A = + fc + + protected[scheduler] val toAccountId: AccountId = + DocspellSystem.account + } + + def collective(id: CollectiveId): UserTaskScope = Collective(id) - def account(accountId: AccountId): UserTaskScope = - Account(accountId) + def account(collectiveId: CollectiveId, userId: Ident): UserTaskScope = + Account(collectiveId, userId) - def apply(accountId: AccountId): UserTaskScope = - UserTaskScope.account(accountId) + def apply(collectiveId: CollectiveId, userId: Option[Ident]): UserTaskScope = + userId.map(Account(collectiveId, _)).getOrElse(collective(collectiveId)) - def apply(collective: Ident): UserTaskScope = - UserTaskScope.collective(collective) - - def apply(collective: Ident, 
login: Option[Ident]): UserTaskScope = - login.map(AccountId(collective, _)).map(account).getOrElse(apply(collective)) + def apply(info: AccountInfo): UserTaskScope = + account(info.collectiveId, info.userId) def system: UserTaskScope = - collective(DocspellSystem.taskGroup) + UserTaskScope.System } diff --git a/modules/scheduler/impl/src/main/scala/docspell/scheduler/impl/QUserTask.scala b/modules/scheduler/impl/src/main/scala/docspell/scheduler/impl/QUserTask.scala index c0df3849..5597bd2e 100644 --- a/modules/scheduler/impl/src/main/scala/docspell/scheduler/impl/QUserTask.scala +++ b/modules/scheduler/impl/src/main/scala/docspell/scheduler/impl/QUserTask.scala @@ -39,14 +39,14 @@ object QUserTask { ): F[RPeriodicTask] = QUserTask .create[F]( - ut.enabled, - scope, - ut.name, - ut.args, - subject.getOrElse(s"${scope.fold(_.user.id, _.id)}: ${ut.name.id}"), - Priority.Low, - ut.timer, - ut.summary + enabled = ut.enabled, + scope = scope, + task = ut.name, + args = ut.args, + subject = subject.getOrElse(s"${scope.toAccountId.asString}: ${ut.name.id}"), + priority = Priority.Low, + timer = ut.timer, + summary = ut.summary ) .map(r => r.copy(id = ut.id)) } @@ -151,24 +151,24 @@ object QUserTask { .current[F] .map { now => RPeriodicTask( - id, - enabled, - task, - scope.collective, - args, - subject, - scope.fold(_.user, identity), - priority, - None, - None, - timer, - timer + id = id, + enabled = enabled, + task = task, + group = scope.toAccountId.collective, + args = args, + subject = subject, + submitter = scope.toAccountId.user, + priority = priority, + worker = None, + marked = None, + timer = timer, + nextrun = timer .nextElapse(now.atZone(Timestamp.UTC)) .map(_.toInstant) .map(Timestamp.apply) .getOrElse(Timestamp.Epoch), - now, - summary + created = now, + summary = summary ) } ) diff --git a/modules/scheduler/impl/src/main/scala/docspell/scheduler/impl/SchedulerModuleBuilder.scala 
b/modules/scheduler/impl/src/main/scala/docspell/scheduler/impl/SchedulerModuleBuilder.scala index e8d68d77..f2931206 100644 --- a/modules/scheduler/impl/src/main/scala/docspell/scheduler/impl/SchedulerModuleBuilder.scala +++ b/modules/scheduler/impl/src/main/scala/docspell/scheduler/impl/SchedulerModuleBuilder.scala @@ -39,6 +39,7 @@ case class SchedulerModuleBuilder[F[_]: Async] private ( schedulerR <- schedulerBuilder .withPubSub(jobStoreModule.pubSubT) .withEventSink(jobStoreModule.eventSink) + .withFindJobOwner(jobStoreModule.findJobOwner) .withQueue(queue) .resource diff --git a/modules/store/src/main/scala/db/migration/MigrationTasks.scala b/modules/store/src/main/scala/db/migration/MigrationTasks.scala index 7cda04d2..ec237f55 100644 --- a/modules/store/src/main/scala/db/migration/MigrationTasks.scala +++ b/modules/store/src/main/scala/db/migration/MigrationTasks.scala @@ -123,8 +123,8 @@ trait MigrationTasks { private def saveChannel(ch: Channel, account: AccountId): ConnectionIO[ChannelRef] = (for { newId <- OptionT.liftF(Ident.randomId[ConnectionIO]) - userData <- OptionT(QLogin.findUser(account)) - userId = userData.account.userId + userData <- OptionT(QLogin.findAccount(account)) + userId = userData.userId r <- RNotificationChannel.fromChannel(ch, newId, userId) _ <- OptionT.liftF(RNotificationChannel.insert(r)) _ <- OptionT.liftF( @@ -174,8 +174,8 @@ trait MigrationTasks { } for { - userData <- OptionT(QLogin.findUser(old.account)) - userId = userData.account.userId + userData <- OptionT(QLogin.findAccount(old.account)) + userId = userData.userId id <- OptionT.liftF(Ident.randomId[ConnectionIO]) now <- OptionT.liftF(Timestamp.current[ConnectionIO]) chName = Some("migrate notify items") diff --git a/modules/store/src/main/scala/db/migration/data/AllPreviewsArgs.scala b/modules/store/src/main/scala/db/migration/data/AllPreviewsArgs.scala new file mode 100644 index 00000000..fcbfafc4 --- /dev/null +++ 
b/modules/store/src/main/scala/db/migration/data/AllPreviewsArgs.scala @@ -0,0 +1,35 @@ +/* + * Copyright 2020 Eike K. & Contributors + * + * SPDX-License-Identifier: AGPL-3.0-or-later + */ + +package db.migration.data + +import docspell.common._ +import io.circe.generic.semiauto._ +import io.circe.{Decoder, Encoder} + +/** Arguments for the `AllPreviewsTask` that submits tasks to generate a preview image + * for attachments. + * + * It can replace the current preview image or only generate one, if it is missing. If no + * collective is specified, it considers all attachments. + * + * @deprecated + * This structure has been replaced to use a `CollectiveId` + */ +case class AllPreviewsArgs( + collective: Option[Ident], + storeMode: MakePreviewArgs.StoreMode +) + +object AllPreviewsArgs { + + val taskName = Ident.unsafe("all-previews") + + implicit val jsonEncoder: Encoder[AllPreviewsArgs] = + deriveEncoder[AllPreviewsArgs] + implicit val jsonDecoder: Decoder[AllPreviewsArgs] = + deriveDecoder[AllPreviewsArgs] +} diff --git a/modules/store/src/main/scala/db/migration/data/ConvertAllPdfArgs.scala b/modules/store/src/main/scala/db/migration/data/ConvertAllPdfArgs.scala new file mode 100644 index 00000000..129da96b --- /dev/null +++ b/modules/store/src/main/scala/db/migration/data/ConvertAllPdfArgs.scala @@ -0,0 +1,34 @@ +/* + * Copyright 2020 Eike K. & Contributors + * + * SPDX-License-Identifier: AGPL-3.0-or-later + */ + +package db.migration.data + +import docspell.common._ +import io.circe._ +import io.circe.generic.semiauto._ + +/** Arguments for the task that finds all pdf files that have not been converted and + * submits for each a job that will convert the file using ocrmypdf. + * + * If the `collective` argument is present, then this task and the ones that are + * submitted by this task run in the realm of the collective (and only their files are + * considered). If it is empty, it is a system task and all files are considered.
+ * + * @deprecated + * replaced with same using `CollectiveId` + */ +case class ConvertAllPdfArgs(collective: Option[Ident]) + +object ConvertAllPdfArgs { + + val taskName = Ident.unsafe("submit-pdf-migration-tasks") + + implicit val jsonDecoder: Decoder[ConvertAllPdfArgs] = + deriveDecoder[ConvertAllPdfArgs] + + implicit val jsonEncoder: Encoder[ConvertAllPdfArgs] = + deriveEncoder[ConvertAllPdfArgs] +} diff --git a/modules/store/src/main/scala/db/migration/data/EmptyTrashArgs.scala b/modules/store/src/main/scala/db/migration/data/EmptyTrashArgs.scala new file mode 100644 index 00000000..0eaa015c --- /dev/null +++ b/modules/store/src/main/scala/db/migration/data/EmptyTrashArgs.scala @@ -0,0 +1,52 @@ +/* + * Copyright 2020 Eike K. & Contributors + * + * SPDX-License-Identifier: AGPL-3.0-or-later + */ + +package db.migration.data + +import docspell.common._ +import docspell.common.syntax.all._ + +import com.github.eikek.calev.CalEvent +import io.circe._ +import io.circe.generic.semiauto._ + +/** Arguments to the empty-trash task. + * + * This task is run periodically to really delete all soft-deleted items. These are items + * with state `ItemState.Deleted`. 
+ * + * @deprecated + * This structure has been changed to use a `CollectiveId` + */ +case class EmptyTrashArgs( + collective: Ident, + minAge: Duration +) { + + def makeSubject: String = + s"Empty Trash: Remove older than ${minAge.toJava}" + + def periodicTaskId: Ident = + EmptyTrashArgs.periodicTaskId(collective) +} + +object EmptyTrashArgs { + + val taskName = Ident.unsafe("empty-trash") + + val defaultSchedule = CalEvent.unsafe("*-*-1/7 03:00:00 UTC") + + def periodicTaskId(coll: Ident): Ident = + Ident.unsafe(s"docspell") / taskName / coll + + implicit val jsonEncoder: Encoder[EmptyTrashArgs] = + deriveEncoder[EmptyTrashArgs] + implicit val jsonDecoder: Decoder[EmptyTrashArgs] = + deriveDecoder[EmptyTrashArgs] + + def parse(str: String): Either[Throwable, EmptyTrashArgs] = + str.parseJsonAs[EmptyTrashArgs] +} diff --git a/modules/store/src/main/scala/db/migration/data/ItemAddonTaskArgs.scala b/modules/store/src/main/scala/db/migration/data/ItemAddonTaskArgs.scala new file mode 100644 index 00000000..5049fdd3 --- /dev/null +++ b/modules/store/src/main/scala/db/migration/data/ItemAddonTaskArgs.scala @@ -0,0 +1,32 @@ +/* + * Copyright 2020 Eike K. & Contributors + * + * SPDX-License-Identifier: AGPL-3.0-or-later + */ + +package db.migration.data + +import docspell.common._ +import io.circe.generic.semiauto.{deriveDecoder, deriveEncoder} +import io.circe.{Decoder, Encoder} + +/** Arguments to submit a task that runs addons configured for some existing item. + * + * If `addonTaskIds` is non empty, only these addon tasks are run. Otherwise all addon + * tasks that are configured for 'existing-item' are run. 
+ * + * @deprecated + * replaced with version using a `CollectiveId` + */ +final case class ItemAddonTaskArgs( + collective: Ident, + itemId: Ident, + addonRunConfigs: Set[Ident] +) + +object ItemAddonTaskArgs { + val taskName: Ident = Ident.unsafe("addon-existing-item") + + implicit val jsonDecoder: Decoder[ItemAddonTaskArgs] = deriveDecoder + implicit val jsonEncoder: Encoder[ItemAddonTaskArgs] = deriveEncoder +} diff --git a/modules/store/src/main/scala/db/migration/data/LearnClassifierArgs.scala b/modules/store/src/main/scala/db/migration/data/LearnClassifierArgs.scala new file mode 100644 index 00000000..408c87c2 --- /dev/null +++ b/modules/store/src/main/scala/db/migration/data/LearnClassifierArgs.scala @@ -0,0 +1,45 @@ +/* + * Copyright 2020 Eike K. & Contributors + * + * SPDX-License-Identifier: AGPL-3.0-or-later + */ + +package db.migration.data + +import docspell.common._ +import docspell.common.syntax.all._ + +import io.circe._ +import io.circe.generic.semiauto._ + +/** Arguments to the classify-item task. + * + * This task is run periodically and learns from existing documents to create a model for + * predicting tags of new documents. The user must give a tag category as a subset of + * possible tags. 
+ * + * @deprecated + * This structure has been replaced to use a `CollectiveId` + */ +case class LearnClassifierArgs( + collective: Ident +) { + + def makeSubject: String = + "Learn tags" + +} + +object LearnClassifierArgs { + + val taskName = Ident.unsafe("learn-classifier") + + implicit val jsonEncoder: Encoder[LearnClassifierArgs] = + deriveEncoder[LearnClassifierArgs] + implicit val jsonDecoder: Decoder[LearnClassifierArgs] = + deriveDecoder[LearnClassifierArgs] + + def parse(str: String): Either[Throwable, LearnClassifierArgs] = + str.parseJsonAs[LearnClassifierArgs] + +} diff --git a/modules/store/src/main/scala/db/migration/data/ProcessItemArgs.scala b/modules/store/src/main/scala/db/migration/data/ProcessItemArgs.scala new file mode 100644 index 00000000..85c6a204 --- /dev/null +++ b/modules/store/src/main/scala/db/migration/data/ProcessItemArgs.scala @@ -0,0 +1,82 @@ +/* + * Copyright 2020 Eike K. & Contributors + * + * SPDX-License-Identifier: AGPL-3.0-or-later + */ + +package db.migration.data + +import ProcessItemArgs._ +import docspell.common._ +import docspell.common.syntax.all._ + +import io.circe._ +import io.circe.generic.semiauto._ + +/** Arguments to the process-item task. + * + * This task is run for each new file to create a new item from it or to add this file as + * an attachment to an existing item. + * + * If the `itemId` is set to some value, the item is tried to load to amend with the + * given files. Otherwise a new item is created. + * + * It is also re-used by the 'ReProcessItem' task. + * + * @deprecated + * This is an old structure where the collective id was an `Ident` which is now the + * collective name. 
It is used to migrate database records to the new structure (same + * name in commons package) + */ +case class ProcessItemArgs(meta: ProcessMeta, files: List[File]) { + + def makeSubject: String = + files.flatMap(_.name) match { + case Nil => s"${meta.sourceAbbrev}: No files supplied" + case n :: Nil => n + case n1 :: n2 :: Nil => s"$n1, $n2" + case _ => s"${files.size} files from ${meta.sourceAbbrev}" + } + + def isNormalProcessing: Boolean = + !meta.reprocess +} + +object ProcessItemArgs { + + val taskName = Ident.unsafe("process-item") + + val multiUploadTaskName = Ident.unsafe("multi-upload-process") + + case class ProcessMeta( + collective: Ident, + itemId: Option[Ident], + language: Language, + direction: Option[Direction], + sourceAbbrev: String, + folderId: Option[Ident], + validFileTypes: Seq[MimeType], + skipDuplicate: Boolean, + fileFilter: Option[Glob], + tags: Option[List[String]], + reprocess: Boolean, + attachmentsOnly: Option[Boolean] + ) + + object ProcessMeta { + implicit val jsonEncoder: Encoder[ProcessMeta] = deriveEncoder[ProcessMeta] + implicit val jsonDecoder: Decoder[ProcessMeta] = deriveDecoder[ProcessMeta] + } + + case class File(name: Option[String], fileMetaId: FileKey) + object File { + implicit val jsonEncoder: Encoder[File] = deriveEncoder[File] + implicit val jsonDecoder: Decoder[File] = deriveDecoder[File] + } + + implicit val jsonEncoder: Encoder[ProcessItemArgs] = deriveEncoder[ProcessItemArgs] + implicit val jsonDecoder: Decoder[ProcessItemArgs] = deriveDecoder[ProcessItemArgs] + + def parse(str: String): Either[Throwable, ProcessItemArgs] = + str.parseJsonAs[ProcessItemArgs] +} diff --git a/modules/store/src/main/scala/db/migration/data/ReIndexTaskArgs.scala b/modules/store/src/main/scala/db/migration/data/ReIndexTaskArgs.scala new file mode 100644 index 00000000..a175e052 --- /dev/null +++ b/modules/store/src/main/scala/db/migration/data/ReIndexTaskArgs.scala @@ -0,0 +1,34 @@ +/* + * Copyright 2020 Eike K. 
& Contributors + * + * SPDX-License-Identifier: AGPL-3.0-or-later + */ + +package db.migration.data + +import docspell.common._ +import io.circe._ +import io.circe.generic.semiauto._ + +/** @deprecated + * This structure has been replaced with one using a `CollectiveId` + */ +final case class ReIndexTaskArgs(collective: Option[Ident]) + +object ReIndexTaskArgs { + val taskName = Ident.unsafe("full-text-reindex") + + def tracker(args: ReIndexTaskArgs): Ident = + args.collective match { + case Some(cid) => + cid / DocspellSystem.migrationTaskTracker + case None => + DocspellSystem.migrationTaskTracker + } + + implicit val jsonEncoder: Encoder[ReIndexTaskArgs] = + deriveEncoder[ReIndexTaskArgs] + + implicit val jsonDecoder: Decoder[ReIndexTaskArgs] = + deriveDecoder[ReIndexTaskArgs] +} diff --git a/modules/store/src/main/scala/db/migration/data/ScheduledAddonTaskArgs.scala b/modules/store/src/main/scala/db/migration/data/ScheduledAddonTaskArgs.scala new file mode 100644 index 00000000..5df1fd36 --- /dev/null +++ b/modules/store/src/main/scala/db/migration/data/ScheduledAddonTaskArgs.scala @@ -0,0 +1,26 @@ +/* + * Copyright 2020 Eike K. 
& Contributors + * + * SPDX-License-Identifier: AGPL-3.0-or-later + */ + +package db.migration.data + +import docspell.common._ +import io.circe.generic.semiauto.{deriveDecoder, deriveEncoder} +import io.circe.{Decoder, Encoder} + +/** @deprecated + * This has been replaced with a version using a `CollectiveId` + * + * @param collective + * @param addonTaskId + */ +final case class ScheduledAddonTaskArgs(collective: Ident, addonTaskId: Ident) + +object ScheduledAddonTaskArgs { + val taskName: Ident = Ident.unsafe("addon-scheduled-task") + + implicit val jsonDecoder: Decoder[ScheduledAddonTaskArgs] = deriveDecoder + implicit val jsonEncoder: Encoder[ScheduledAddonTaskArgs] = deriveEncoder +} diff --git a/modules/store/src/main/scala/docspell/store/file/BinnyUtils.scala b/modules/store/src/main/scala/docspell/store/file/BinnyUtils.scala index 15eb4892..773f2f6e 100644 --- a/modules/store/src/main/scala/docspell/store/file/BinnyUtils.scala +++ b/modules/store/src/main/scala/docspell/store/file/BinnyUtils.scala @@ -24,7 +24,7 @@ import scodec.bits.ByteVector object BinnyUtils { def fileKeyToBinaryId(fk: FileKey): BinaryId = - BinaryId(s"${fk.collective.id}/${fk.category.id.id}/${fk.id.id}") + BinaryId(s"${fk.collective.valueAsString}/${fk.category.id.id}/${fk.id.id}") def fileKeyPartToPrefix(fkp: FileKeyPart): Option[String] = fkp match { @@ -38,7 +38,7 @@ object BinnyUtils { bid.id.split('/').toList match { case cId :: catId :: fId :: Nil => for { - coll <- Ident.fromString(cId) + coll <- CollectiveId.fromString(cId) cat <- FileCategory.fromString(catId) file <- Ident.fromString(fId) } yield FileKey(coll, cat, file) @@ -78,7 +78,7 @@ object BinnyUtils { def toPath(base: Path, binaryId: BinaryId): Path = { val fkey = unsafeBinaryIdToFileKey(binaryId) - base / fkey.collective.id / fkey.category.id.id / fkey.id.id / "file" + base / s"${fkey.collective.value}" / fkey.category.id.id / fkey.id.id / "file" } def toId(file: Path): Option[BinaryId] = @@ -87,7 +87,11 @@ object 
BinnyUtils { cat <- id.parent fcat <- FileCategory.fromString(cat.asId.id).toOption coll <- cat.parent - fkey = FileKey(Ident.unsafe(coll.asId.id), fcat, Ident.unsafe(id.asId.id)) + fkey = FileKey( + CollectiveId.unsafeFromString(coll.asId.id), + fcat, + Ident.unsafe(id.asId.id) + ) } yield fileKeyToBinaryId(fkey) PathMapping(toPath)(toId) diff --git a/modules/store/src/main/scala/docspell/store/file/FileRepository.scala b/modules/store/src/main/scala/docspell/store/file/FileRepository.scala index 53d96815..b64019c8 100644 --- a/modules/store/src/main/scala/docspell/store/file/FileRepository.scala +++ b/modules/store/src/main/scala/docspell/store/file/FileRepository.scala @@ -26,7 +26,7 @@ trait FileRepository[F[_]] { def delete(key: FileKey): F[Unit] def save( - collective: Ident, + collective: CollectiveId, category: FileCategory, hint: MimeTypeHint ): Pipe[F, Byte, FileKey] diff --git a/modules/store/src/main/scala/docspell/store/file/FileRepositoryImpl.scala b/modules/store/src/main/scala/docspell/store/file/FileRepositoryImpl.scala index d612d7ed..a0d02153 100644 --- a/modules/store/src/main/scala/docspell/store/file/FileRepositoryImpl.scala +++ b/modules/store/src/main/scala/docspell/store/file/FileRepositoryImpl.scala @@ -40,7 +40,7 @@ final class FileRepositoryImpl[F[_]: Sync]( bs.delete(keyFun(key)) *> attrStore.deleteAttr(key).void def save( - collective: Ident, + collective: CollectiveId, category: FileCategory, hint: MimeTypeHint ): Pipe[F, Byte, FileKey] = { @@ -60,7 +60,7 @@ final class FileRepositoryImpl[F[_]: Sync]( } def randomKey( - collective: Ident, + collective: CollectiveId, category: FileCategory ): F[FileKey] = BinaryId.random[F].map(bid => FileKey(collective, category, Ident.unsafe(bid.id))) diff --git a/modules/store/src/main/scala/docspell/store/file/FileUrlReader.scala b/modules/store/src/main/scala/docspell/store/file/FileUrlReader.scala index 47b8cc52..bd4e41ee 100644 --- 
a/modules/store/src/main/scala/docspell/store/file/FileUrlReader.scala +++ b/modules/store/src/main/scala/docspell/store/file/FileUrlReader.scala @@ -24,7 +24,7 @@ object FileUrlReader { scheme = Nel.of(scheme), authority = Some(""), path = LenientUri.NonEmptyPath( - Nel.of(key.collective.id, key.category.id.id, key.id.id), + Nel.of(key.collective.valueAsString, key.category.id.id, key.id.id), false ), query = None, diff --git a/modules/store/src/main/scala/docspell/store/queries/QItem.scala b/modules/store/src/main/scala/docspell/store/queries/QItem.scala index 5234940a..f33c28e5 100644 --- a/modules/store/src/main/scala/docspell/store/queries/QItem.scala +++ b/modules/store/src/main/scala/docspell/store/queries/QItem.scala @@ -666,7 +666,7 @@ object QItem extends FtsSupport { final case class NameAndNotes( id: Ident, - collective: Ident, + collective: CollectiveId, folder: Option[Ident], name: String, notes: Option[String], diff --git a/modules/store/src/main/scala/docspell/store/queries/QLogin.scala b/modules/store/src/main/scala/docspell/store/queries/QLogin.scala index c79c4a8d..2ec3ef13 100644 --- a/modules/store/src/main/scala/docspell/store/queries/QLogin.scala +++ b/modules/store/src/main/scala/docspell/store/queries/QLogin.scala @@ -58,6 +58,18 @@ object QLogin { def findUser(userId: Ident): ConnectionIO[Option[Data]] = findUser0((user, _) => user.uid === userId) + /** Finds the account given a combination of login/user-id and coll-id/coll-name pair. 
+ */ + def findAccount(acc: AccountId): ConnectionIO[Option[AccountInfo]] = { + val collIdOpt = acc.collective.id.toLongOption.map(CollectiveId(_)) + findUser0((ut, ct) => + (ut.login === acc.user || ut.uid === acc.user) && collIdOpt + .map(id => ct.id === id) + .getOrElse(ct.name === acc.collective) + ) + .map(_.map(_.account)) + } + def findByRememberMe( rememberId: Ident, minCreated: Timestamp diff --git a/modules/store/src/main/scala/docspell/store/records/RCollective.scala b/modules/store/src/main/scala/docspell/store/records/RCollective.scala index da31fb18..48b431c6 100644 --- a/modules/store/src/main/scala/docspell/store/records/RCollective.scala +++ b/modules/store/src/main/scala/docspell/store/records/RCollective.scala @@ -37,6 +37,7 @@ object RCollective { val created = Column[Timestamp]("created", this) val all = NonEmptyList.of[Column[_]](id, name, state, language, integration, created) + val allNoId = NonEmptyList.fromListUnsafe(all.tail) } def makeDefault(collName: Ident, created: Timestamp): RCollective = @@ -53,12 +54,17 @@ object RCollective { def as(alias: String): Table = Table(Some(alias)) - def insert(value: RCollective): ConnectionIO[Int] = - DML.insert( - T, - T.all, - fr"${value.id},${value.name},${value.state},${value.language},${value.integrationEnabled},${value.created}" - ) + def insert(value: RCollective): ConnectionIO[CollectiveId] = + DML + .insertFragment( + T, + T.allNoId, + List( + fr"${value.name},${value.state},${value.language},${value.integrationEnabled},${value.created}" + ) + ) + .update + .withUniqueGeneratedKeys[CollectiveId](T.id.name) def update(value: RCollective): ConnectionIO[Int] = DML.update( @@ -139,6 +145,11 @@ object RCollective { sql.query[RCollective].option } + def findByName(cname: Ident): ConnectionIO[Option[RCollective]] = { + val sql = run(select(T.all), from(T), T.name === cname) + sql.query[RCollective].option + } + def findByItem(itemId: Ident): ConnectionIO[Option[RCollective]] = { val i = 
RItem.as("i") val c = RCollective.as("c") @@ -154,6 +165,11 @@ object RCollective { sql.query[Int].unique.map(_ > 0) } + def existsByName(name: Ident): ConnectionIO[Boolean] = { + val sql = Select(count(T.id).s, from(T), T.name === name).build + sql.query[Int].unique.map(_ > 0) + } + def findAll(order: Table => Column[_]): ConnectionIO[Vector[RCollective]] = { val sql = Select(select(T.all), from(T)).orderBy(order(T)) sql.build.query[RCollective].to[Vector] diff --git a/modules/store/src/main/scala/docspell/store/records/RItem.scala b/modules/store/src/main/scala/docspell/store/records/RItem.scala index e4168f64..90d03612 100644 --- a/modules/store/src/main/scala/docspell/store/records/RItem.scala +++ b/modules/store/src/main/scala/docspell/store/records/RItem.scala @@ -123,8 +123,8 @@ object RItem { fr"${v.created},${v.updated},${v.notes},${v.folderId}" ) - def getCollective(itemId: Ident): ConnectionIO[Option[Ident]] = - Select(T.cid.s, from(T), T.id === itemId).build.query[Ident].option + def getCollective(itemId: Ident): ConnectionIO[Option[CollectiveId]] = + Select(T.cid.s, from(T), T.id === itemId).build.query[CollectiveId].option def updateAll(item: RItem): ConnectionIO[Int] = for { diff --git a/modules/store/src/main/scala/docspell/store/records/RSentMail.scala b/modules/store/src/main/scala/docspell/store/records/RSentMail.scala index 6c04524b..809f1319 100644 --- a/modules/store/src/main/scala/docspell/store/records/RSentMail.scala +++ b/modules/store/src/main/scala/docspell/store/records/RSentMail.scala @@ -60,7 +60,7 @@ object RSentMail { def forItem( itemId: Ident, - accId: AccountId, + userId: Ident, messageId: String, sender: MailAddress, connName: Ident, @@ -69,10 +69,9 @@ object RSentMail { body: String ): OptionT[ConnectionIO, (RSentMail, RSentMailItem)] = for { - user <- OptionT(RUser.findByAccount(accId)) sm <- OptionT.liftF( RSentMail[ConnectionIO]( - user.uid, + userId, messageId, sender, connName, diff --git 
a/modules/store/src/main/scala/docspell/store/records/RShare.scala b/modules/store/src/main/scala/docspell/store/records/RShare.scala index 196c3a00..aeaf0026 100644 --- a/modules/store/src/main/scala/docspell/store/records/RShare.scala +++ b/modules/store/src/main/scala/docspell/store/records/RShare.scala @@ -94,17 +94,21 @@ object RShare { else Nil) ) - def findOne(id: Ident, cid: CollectiveId): OptionT[ConnectionIO, (RShare, RUser)] = { + def findOne( + id: Ident, + cid: CollectiveId + ): OptionT[ConnectionIO, (RShare, AccountInfo)] = { val s = RShare.as("s") val u = RUser.as("u") + val c = RCollective.as("c") OptionT( Select( - select(s.all, u.all), - from(s).innerJoin(u, u.uid === s.userId), + select(s.all, NonEmptyList.of(c.id, c.name, u.uid, u.login)), + from(s).innerJoin(u, u.uid === s.userId).innerJoin(c, c.id === u.cid), s.id === id && u.cid === cid ).build - .query[(RShare, RUser)] + .query[(RShare, AccountInfo)] .option ) } @@ -115,20 +119,21 @@ object RShare { def findActive( id: Ident, current: Timestamp - ): OptionT[ConnectionIO, (RShare, RUser)] = { + ): OptionT[ConnectionIO, (RShare, AccountInfo)] = { val s = RShare.as("s") val u = RUser.as("u") + val c = RCollective.as("c") OptionT( Select( - select(s.all, u.all), - from(s).innerJoin(u, s.userId === u.uid), + select(s.all, NonEmptyList.of(c.id, c.name, u.uid, u.login)), + from(s).innerJoin(u, s.userId === u.uid).innerJoin(c, c.id === u.cid), activeCondition(s, id, current) - ).build.query[(RShare, RUser)].option + ).build.query[(RShare, AccountInfo)].option ) } - def findCurrentActive(id: Ident): OptionT[ConnectionIO, (RShare, RUser)] = + def findCurrentActive(id: Ident): OptionT[ConnectionIO, (RShare, AccountInfo)] = OptionT.liftF(Timestamp.current[ConnectionIO]).flatMap(now => findActive(id, now)) def findActivePassword(id: Ident): OptionT[ConnectionIO, Option[Password]] = @@ -159,21 +164,22 @@ object RShare { cid: CollectiveId, ownerLogin: Option[Ident], q: Option[String] - ): 
ConnectionIO[List[(RShare, RUser)]] = { + ): ConnectionIO[List[(RShare, AccountInfo)]] = { val s = RShare.as("s") val u = RUser.as("u") + val c = RCollective.as("c") val ownerQ = ownerLogin.map(name => u.login === name) val nameQ = q.map(n => s.name.like(s"%$n%")) Select( - select(s.all, u.all), - from(s).innerJoin(u, u.uid === s.userId), + select(s.all, NonEmptyList.of(c.id, c.name, u.uid, u.login)), + from(s).innerJoin(u, u.uid === s.userId).innerJoin(c, c.id === u.cid), u.cid === cid &&? ownerQ &&? nameQ ) .orderBy(s.publishedAt.desc) .build - .query[(RShare, RUser)] + .query[(RShare, AccountInfo)] .to[List] } diff --git a/modules/store/src/main/scala/docspell/store/records/RSource.scala b/modules/store/src/main/scala/docspell/store/records/RSource.scala index 214660e9..1a125003 100644 --- a/modules/store/src/main/scala/docspell/store/records/RSource.scala +++ b/modules/store/src/main/scala/docspell/store/records/RSource.scala @@ -125,8 +125,8 @@ object RSource { private[records] def findEnabledSql(id: Ident): Fragment = run(select(table.all), from(table), where(table.sid === id, table.enabled === true)) - def findCollective(sourceId: Ident): ConnectionIO[Option[Ident]] = - run(select(table.cid), from(table), table.sid === sourceId).query[Ident].option + def findCollectiveId(sourceId: Ident): ConnectionIO[Option[CollectiveId]] = + run(select(table.cid), from(table), table.sid === sourceId).query[CollectiveId].option def findAll( coll: CollectiveId, diff --git a/modules/store/src/main/scala/docspell/store/records/RUser.scala b/modules/store/src/main/scala/docspell/store/records/RUser.scala index 1e28c6f6..20d6f16c 100644 --- a/modules/store/src/main/scala/docspell/store/records/RUser.scala +++ b/modules/store/src/main/scala/docspell/store/records/RUser.scala @@ -129,9 +129,20 @@ object RUser { sql.query[RUser].option } - def findById(userId: Ident): ConnectionIO[Option[RUser]] = { + def findById(userId: Ident, cid: Option[CollectiveId]): 
ConnectionIO[Option[RUser]] = { val t = Table(None) - val sql = run(select(t.all), from(t), t.uid === userId) + val sql = + run(select(t.all), from(t), t.uid === userId &&? cid.map(id => t.cid === id)) + sql.query[RUser].option + } + + def findByLogin( + login: Ident, + cid: Option[CollectiveId] + ): ConnectionIO[Option[RUser]] = { + val t = Table(None) + val sql = + run(select(t.all), from(t), t.login === login &&? cid.map(id => t.cid === id)) sql.query[RUser].option } diff --git a/modules/store/src/main/scala/docspell/store/records/RUserEmail.scala b/modules/store/src/main/scala/docspell/store/records/RUserEmail.scala index c8b68e28..cd3e2a53 100644 --- a/modules/store/src/main/scala/docspell/store/records/RUserEmail.scala +++ b/modules/store/src/main/scala/docspell/store/records/RUserEmail.scala @@ -77,8 +77,8 @@ object RUserEmail { now ) - def fromAccount( - accId: AccountId, + def fromUser( + userId: Ident, name: Ident, smtpHost: String, smtpPort: Option[Int], @@ -92,10 +92,9 @@ object RUserEmail { for { now <- OptionT.liftF(Timestamp.current[ConnectionIO]) id <- OptionT.liftF(Ident.randomId[ConnectionIO]) - user <- OptionT(RUser.findByAccount(accId)) } yield RUserEmail( id, - user.uid, + userId, name, smtpHost, smtpPort, diff --git a/modules/store/src/main/scala/docspell/store/records/RUserImap.scala b/modules/store/src/main/scala/docspell/store/records/RUserImap.scala index ae4d3265..127d82fe 100644 --- a/modules/store/src/main/scala/docspell/store/records/RUserImap.scala +++ b/modules/store/src/main/scala/docspell/store/records/RUserImap.scala @@ -75,8 +75,8 @@ object RUserImap { now ) - def fromAccount( - accId: AccountId, + def fromUser( + userId: Ident, name: Ident, imapHost: String, imapPort: Option[Int], @@ -89,10 +89,9 @@ object RUserImap { for { now <- OptionT.liftF(Timestamp.current[ConnectionIO]) id <- OptionT.liftF(Ident.randomId[ConnectionIO]) - user <- OptionT(RUser.findByAccount(accId)) } yield RUserImap( id, - user.uid, + userId, name, 
imapHost, imapPort, diff --git a/modules/store/src/test/scala/docspell/store/fts/TempFtsOpsTest.scala b/modules/store/src/test/scala/docspell/store/fts/TempFtsOpsTest.scala index ad081354..9647d88f 100644 --- a/modules/store/src/test/scala/docspell/store/fts/TempFtsOpsTest.scala +++ b/modules/store/src/test/scala/docspell/store/fts/TempFtsOpsTest.scala @@ -99,8 +99,8 @@ class TempFtsOpsTest extends DatabaseTest { for { today <- IO(LocalDate.now()) account <- store - .transact(QLogin.findUser(DocspellSystem.account)) - .map(_.get.account) + .transact(QLogin.findAccount(DocspellSystem.account)) + .map(_.get) tempTable = ftsResults .through(TempFtsOps.prepareTable(store.dbms, "fts_result")) .compile From eabcffe71a57c6b763b6db6deddbe9e2273db8a8 Mon Sep 17 00:00:00 2001 From: eikek Date: Thu, 4 Aug 2022 12:44:09 +0200 Subject: [PATCH 05/15] Adopt restserver to new collective-id --- .../docspell/backend/ops/OCollective.scala | 2 - .../docspell/backend/ops/ODownloadAll.scala | 10 +-- .../scala/docspell/backend/ops/OFolder.scala | 14 ++-- .../scala/docspell/backend/ops/OJob.scala | 42 +++++++--- .../docspell/common/ScanMailboxArgs.scala | 2 +- .../api/PeriodicDueItemsArgs.scala | 2 +- .../notification/api/PeriodicQueryArgs.scala | 2 +- .../docspell/restserver/auth/CookieData.scala | 6 +- .../restserver/conv/Conversions.scala | 34 +++++--- .../routes/AddonArchiveRoutes.scala | 8 +- .../routes/AddonRunConfigRoutes.scala | 8 +- .../restserver/routes/AddonRunRoutes.scala | 8 +- .../routes/AttachmentMultiRoutes.scala | 2 +- .../restserver/routes/AttachmentRoutes.scala | 36 ++++---- .../restserver/routes/CheckFileRoutes.scala | 2 +- .../routes/ClientSettingsRoutes.scala | 30 +++++-- .../restserver/routes/CollectiveRoutes.scala | 19 ++--- .../restserver/routes/CustomFieldRoutes.scala | 10 +-- .../restserver/routes/DownloadAllRoutes.scala | 8 +- .../restserver/routes/EquipmentRoutes.scala | 10 +-- .../restserver/routes/FolderRoutes.scala | 28 ++++--- 
.../routes/FullTextIndexRoutes.scala | 4 +- .../routes/IntegrationEndpointRoutes.scala | 30 +++---- .../restserver/routes/ItemLinkRoutes.scala | 10 +-- .../restserver/routes/ItemMultiRoutes.scala | 51 +++++++----- .../restserver/routes/ItemRoutes.scala | 82 +++++++++++-------- .../restserver/routes/JobQueueRoutes.scala | 9 +- .../restserver/routes/LoginRoutes.scala | 2 +- .../restserver/routes/MailSendRoutes.scala | 4 +- .../routes/MailSettingsRoutes.scala | 24 +++--- .../routes/NotificationRoutes.scala | 18 ++-- .../routes/NotifyDueItemsRoutes.scala | 2 +- .../routes/OrganizationRoutes.scala | 17 ++-- .../routes/PeriodicQueryRoutes.scala | 2 +- .../restserver/routes/PersonRoutes.scala | 13 ++- .../restserver/routes/ScanMailboxRoutes.scala | 10 +-- .../restserver/routes/SentMailRoutes.scala | 6 +- .../restserver/routes/ShareRoutes.scala | 17 ++-- .../restserver/routes/SourceRoutes.scala | 8 +- .../restserver/routes/TagRoutes.scala | 8 +- .../restserver/routes/UploadRoutes.scala | 22 +++-- .../restserver/routes/UserRoutes.scala | 36 +++++--- .../restserver/ws/WebSocketRoutes.scala | 5 +- .../scala/db/migration/MigrationTasks.scala | 20 +++-- .../migration/data/PeriodicDueItemsArgs.scala | 43 ++++++++++ .../db/migration/data/PeriodicQueryArgs.scala | 33 ++++++++ .../db/migration/data/ScanMailboxArgs.scala | 69 ++++++++++++++++ .../docspell/store/records/RFolder.scala | 4 +- 48 files changed, 539 insertions(+), 293 deletions(-) create mode 100644 modules/store/src/main/scala/db/migration/data/PeriodicDueItemsArgs.scala create mode 100644 modules/store/src/main/scala/db/migration/data/PeriodicQueryArgs.scala create mode 100644 modules/store/src/main/scala/db/migration/data/ScanMailboxArgs.scala diff --git a/modules/backend/src/main/scala/docspell/backend/ops/OCollective.scala b/modules/backend/src/main/scala/docspell/backend/ops/OCollective.scala index 0613cd06..41b3bfdd 100644 --- a/modules/backend/src/main/scala/docspell/backend/ops/OCollective.scala +++ 
b/modules/backend/src/main/scala/docspell/backend/ops/OCollective.scala @@ -28,7 +28,6 @@ trait OCollective[F[_]] { def updateSettings( collective: CollectiveId, - collectiveName: Ident, settings: OCollective.Settings ): F[AddResult] @@ -147,7 +146,6 @@ object OCollective { def updateSettings( collectiveId: CollectiveId, - collectiveName: Ident, sett: Settings ): F[AddResult] = store diff --git a/modules/backend/src/main/scala/docspell/backend/ops/ODownloadAll.scala b/modules/backend/src/main/scala/docspell/backend/ops/ODownloadAll.scala index 6cec4b11..99d4a9c0 100644 --- a/modules/backend/src/main/scala/docspell/backend/ops/ODownloadAll.scala +++ b/modules/backend/src/main/scala/docspell/backend/ops/ODownloadAll.scala @@ -40,10 +40,10 @@ trait ODownloadAll[F[_]] { def submit(accountId: AccountInfo, req: DownloadRequest): F[DownloadSummary] /** Given the id from the summary, cancels a running job. */ - def cancelDownload(accountId: AccountId, id: Ident): F[OJob.JobCancelResult] + def cancelDownload(cid: CollectiveId, id: Ident): F[OJob.JobCancelResult] /** Returns the file if it is present, given a summary id. */ - def getFile(collective: Ident, id: Ident): F[Option[DownloadAllFile[F]]] + def getFile(collective: CollectiveId, id: Ident): F[Option[DownloadAllFile[F]]] /** Deletes a download archive given it's id. 
*/ def deleteFile(id: Ident): F[Unit] @@ -122,7 +122,7 @@ object ODownloadAll { else DownloadState.NotPresent } yield state - def getFile(collective: Ident, id: Ident) = + def getFile(collective: CollectiveId, id: Ident) = OptionT(store.transact(RDownloadQuery.findById(id))) .map(_._2) .map(md => @@ -156,10 +156,10 @@ object ODownloadAll { _ <- store.fileRepo.delete(fkey) } yield () - def cancelDownload(accountId: AccountId, id: Ident) = + def cancelDownload(cid: CollectiveId, id: Ident) = OptionT(store.transact(RDownloadQuery.findById(id))) .flatMap(t => OptionT(store.transact(RJob.findNonFinalByTracker(t._1.id)))) - .semiflatMap(job => jobs.cancelJob(job.id, accountId.collective)) + .semiflatMap(job => jobs.cancelJob(job.id, UserTaskScope.collective(cid))) .getOrElse(JobCancelResult.jobNotFound) } diff --git a/modules/backend/src/main/scala/docspell/backend/ops/OFolder.scala b/modules/backend/src/main/scala/docspell/backend/ops/OFolder.scala index ea8a95a5..744d3d02 100644 --- a/modules/backend/src/main/scala/docspell/backend/ops/OFolder.scala +++ b/modules/backend/src/main/scala/docspell/backend/ops/OFolder.scala @@ -30,10 +30,10 @@ trait OFolder[F[_]] { userId: Ident ): F[Option[OFolder.FolderDetail]] - /** Adds a new folder. If `login` is non-empty, the `folder.user` property is ignored - * and the user-id is determined by the given login name. + /** Adds a new folder. If `login` is non-empty, the `folder.owner` property is ignored + * and its value is determined by the given login name. 
*/ - def add(folder: RFolder, userId: Option[Ident]): F[AddResult] + def add(folder: RFolder, login: Option[Ident]): F[AddResult] def changeName( folder: Ident, @@ -123,11 +123,11 @@ object OFolder { ): F[Option[FolderDetail]] = store.transact(QFolder.findById(id, collectiveId, userId)) - def add(folder: RFolder, userId: Option[Ident]): F[AddResult] = { - val insert = userId match { - case Some(uid) => + def add(folder: RFolder, login: Option[Ident]): F[AddResult] = { + val insert = login match { + case Some(userLogin) => for { - user <- RUser.findById(uid, folder.collectiveId.some) + user <- RUser.findByLogin(userLogin, folder.collectiveId.some) s = user.map(u => folder.copy(owner = u.uid)).getOrElse(folder) n <- RFolder.insert(s) } yield n diff --git a/modules/backend/src/main/scala/docspell/backend/ops/OJob.scala b/modules/backend/src/main/scala/docspell/backend/ops/OJob.scala index ba848c7f..67923b7d 100644 --- a/modules/backend/src/main/scala/docspell/backend/ops/OJob.scala +++ b/modules/backend/src/main/scala/docspell/backend/ops/OJob.scala @@ -9,11 +9,11 @@ package docspell.backend.ops import cats.data.OptionT import cats.effect._ import cats.implicits._ - import docspell.backend.ops.OJob.{CollectiveQueueState, JobCancelResult} import docspell.common._ import docspell.pubsub.api.PubSubT import docspell.scheduler.msg.JobDone +import docspell.scheduler.usertask.UserTaskScope import docspell.store.Store import docspell.store.UpdateResult import docspell.store.queries.QJobQueue @@ -21,13 +21,13 @@ import docspell.store.records.{RJob, RJobLog} trait OJob[F[_]] { - def queueState(collective: Ident, maxResults: Int): F[CollectiveQueueState] + def queueState(collective: UserTaskScope, maxResults: Int): F[CollectiveQueueState] - def cancelJob(id: Ident, collective: Ident): F[JobCancelResult] + def cancelJob(id: Ident, collective: UserTaskScope): F[JobCancelResult] - def setPriority(id: Ident, collective: Ident, prio: Priority): F[UpdateResult] + def setPriority(id: 
Ident, collective: UserTaskScope, prio: Priority): F[UpdateResult] - def getUnfinishedJobCount(collective: Ident): F[Int] + def getUnfinishedJobCount(collective: UserTaskScope): F[Int] } object OJob { @@ -61,20 +61,34 @@ object OJob { Resource.pure[F, OJob[F]](new OJob[F] { private[this] val logger = docspell.logging.getLogger[F] - def queueState(collective: Ident, maxResults: Int): F[CollectiveQueueState] = + private def scopeToGroup(s: UserTaskScope) = + s.collectiveId + .map(_.valueAsIdent) + .getOrElse(DocspellSystem.taskGroup) + + def queueState( + collective: UserTaskScope, + maxResults: Int + ): F[CollectiveQueueState] = store .transact( - QJobQueue.queueStateSnapshot(collective, maxResults.toLong) + QJobQueue.queueStateSnapshot(scopeToGroup(collective), maxResults.toLong) ) .map(t => JobDetail(t._1, t._2)) .compile .toVector .map(CollectiveQueueState) - def setPriority(id: Ident, collective: Ident, prio: Priority): F[UpdateResult] = - UpdateResult.fromUpdate(store.transact(RJob.setPriority(id, collective, prio))) + def setPriority( + id: Ident, + collective: UserTaskScope, + prio: Priority + ): F[UpdateResult] = + UpdateResult.fromUpdate( + store.transact(RJob.setPriority(id, scopeToGroup(collective), prio)) + ) - def cancelJob(id: Ident, collective: Ident): F[JobCancelResult] = { + def cancelJob(id: Ident, collective: UserTaskScope): F[JobCancelResult] = { def remove(job: RJob): F[JobCancelResult] = for { n <- store.transact(RJob.delete(job.id)) @@ -99,7 +113,9 @@ object OJob { } (for { - job <- OptionT(store.transact(RJob.findByIdAndGroup(id, collective))) + job <- OptionT( + store.transact(RJob.findByIdAndGroup(id, scopeToGroup(collective))) + ) result <- OptionT.liftF( if (job.isInProgress) tryCancel(job) else remove(job) @@ -108,7 +124,7 @@ object OJob { .getOrElse(JobCancelResult.jobNotFound) } - def getUnfinishedJobCount(collective: Ident): F[Int] = - store.transact(RJob.getUnfinishedCount(collective)) + def getUnfinishedJobCount(collective: 
UserTaskScope): F[Int] = + store.transact(RJob.getUnfinishedCount(scopeToGroup(collective))) }) } diff --git a/modules/common/src/main/scala/docspell/common/ScanMailboxArgs.scala b/modules/common/src/main/scala/docspell/common/ScanMailboxArgs.scala index 9b886c90..963c3808 100644 --- a/modules/common/src/main/scala/docspell/common/ScanMailboxArgs.scala +++ b/modules/common/src/main/scala/docspell/common/ScanMailboxArgs.scala @@ -20,7 +20,7 @@ import io.circe.generic.semiauto._ */ case class ScanMailboxArgs( // the docspell user account - account: AccountId, + account: AccountInfo, // the configured imap connection imapConnection: Ident, // scan folders recursively diff --git a/modules/notification/api/src/main/scala/docspell/notification/api/PeriodicDueItemsArgs.scala b/modules/notification/api/src/main/scala/docspell/notification/api/PeriodicDueItemsArgs.scala index 7de991cf..e71253b3 100644 --- a/modules/notification/api/src/main/scala/docspell/notification/api/PeriodicDueItemsArgs.scala +++ b/modules/notification/api/src/main/scala/docspell/notification/api/PeriodicDueItemsArgs.scala @@ -21,7 +21,7 @@ import io.circe.{Decoder, Encoder} * the json data of the corresponding task. 
*/ final case class PeriodicDueItemsArgs( - account: AccountId, + account: AccountInfo, channels: NonEmptyList[ChannelRef], remindDays: Int, daysBack: Option[Int], diff --git a/modules/notification/api/src/main/scala/docspell/notification/api/PeriodicQueryArgs.scala b/modules/notification/api/src/main/scala/docspell/notification/api/PeriodicQueryArgs.scala index 8e0dd405..01f4497d 100644 --- a/modules/notification/api/src/main/scala/docspell/notification/api/PeriodicQueryArgs.scala +++ b/modules/notification/api/src/main/scala/docspell/notification/api/PeriodicQueryArgs.scala @@ -14,7 +14,7 @@ import io.circe.generic.semiauto import io.circe.{Decoder, Encoder} final case class PeriodicQueryArgs( - account: AccountId, + account: AccountInfo, channels: NonEmptyList[ChannelRef], query: Option[ItemQueryString], bookmark: Option[String], diff --git a/modules/restserver/src/main/scala/docspell/restserver/auth/CookieData.scala b/modules/restserver/src/main/scala/docspell/restserver/auth/CookieData.scala index 531ed9bf..577276f1 100644 --- a/modules/restserver/src/main/scala/docspell/restserver/auth/CookieData.scala +++ b/modules/restserver/src/main/scala/docspell/restserver/auth/CookieData.scala @@ -7,14 +7,12 @@ package docspell.restserver.auth import docspell.backend.auth._ -import docspell.common.AccountId -import docspell.common.LenientUri - +import docspell.common.{AccountInfo, LenientUri} import org.http4s._ import org.typelevel.ci.CIString case class CookieData(auth: AuthToken) { - def accountId: AccountId = auth.account + def accountInfo: AccountInfo = auth.account def asString: String = auth.asString def asCookie(baseUrl: LenientUri): ResponseCookie = { diff --git a/modules/restserver/src/main/scala/docspell/restserver/conv/Conversions.scala b/modules/restserver/src/main/scala/docspell/restserver/conv/Conversions.scala index 806b69c8..38636cc3 100644 --- a/modules/restserver/src/main/scala/docspell/restserver/conv/Conversions.scala +++ 
b/modules/restserver/src/main/scala/docspell/restserver/conv/Conversions.scala @@ -371,7 +371,10 @@ trait Conversions { ) } - def newOrg[F[_]: Sync](v: Organization, cid: Ident): F[OOrganization.OrgAndContacts] = { + def newOrg[F[_]: Sync]( + v: Organization, + cid: CollectiveId + ): F[OOrganization.OrgAndContacts] = { def contacts(oid: Ident) = v.contacts.traverse(c => newContact(c, oid.some, None)) for { @@ -397,7 +400,7 @@ trait Conversions { def changeOrg[F[_]: Sync]( v: Organization, - cid: Ident + cid: CollectiveId ): F[OOrganization.OrgAndContacts] = { def contacts(oid: Ident) = v.contacts.traverse(c => newContact(c, oid.some, None)) @@ -435,7 +438,10 @@ trait Conversions { ) } - def newPerson[F[_]: Sync](v: Person, cid: Ident): F[OOrganization.PersonAndContacts] = { + def newPerson[F[_]: Sync]( + v: Person, + cid: CollectiveId + ): F[OOrganization.PersonAndContacts] = { def contacts(pid: Ident) = v.contacts.traverse(c => newContact(c, None, pid.some)) for { @@ -461,7 +467,7 @@ trait Conversions { def changePerson[F[_]: Sync]( v: Person, - cid: Ident + cid: CollectiveId ): F[OOrganization.PersonAndContacts] = { def contacts(pid: Ident) = v.contacts.traverse(c => newContact(c, None, pid.some)) @@ -512,7 +518,7 @@ trait Conversions { ru.created ) - def newUser[F[_]: Sync](u: User, cid: Ident): F[RUser] = + def newUser[F[_]: Sync](u: User, cid: CollectiveId): F[RUser] = Conversions.timeId.map { case (id, now) => RUser( id, @@ -528,7 +534,7 @@ trait Conversions { ) } - def changeUser(u: User, cid: Ident): RUser = + def changeUser(u: User, cid: CollectiveId): RUser = RUser( u.id, u.login, @@ -547,12 +553,12 @@ trait Conversions { def mkTag(rt: RTag): Tag = Tag(rt.tagId, rt.name, rt.category, rt.created) - def newTag[F[_]: Sync](t: Tag, cid: Ident): F[RTag] = + def newTag[F[_]: Sync](t: Tag, cid: CollectiveId): F[RTag] = Conversions.timeId.map { case (id, now) => RTag(id, cid, t.name.trim, t.category.map(_.trim), now) } - def changeTag(t: Tag, cid: Ident): RTag = 
+ def changeTag(t: Tag, cid: CollectiveId): RTag = RTag(t.id, cid, t.name.trim, t.category.map(_.trim), t.created) // sources @@ -575,7 +581,7 @@ trait Conversions { TagList(s.tags.length, s.tags.map(mkTag).toList) ) - def newSource[F[_]: Sync](s: Source, cid: Ident): F[RSource] = + def newSource[F[_]: Sync](s: Source, cid: CollectiveId): F[RSource] = Conversions.timeId.map { case (id, now) => RSource( id, @@ -593,10 +599,10 @@ trait Conversions { ) } - def changeSource[F[_]](s: Source, coll: Ident): RSource = + def changeSource(s: Source, cid: CollectiveId): RSource = RSource( s.id, - coll, + cid, s.abbrev.trim, s.description, s.counter, @@ -613,12 +619,12 @@ trait Conversions { def mkEquipment(re: REquipment): Equipment = Equipment(re.eid, re.name, re.created, re.notes, re.use) - def newEquipment[F[_]: Sync](e: Equipment, cid: Ident): F[REquipment] = + def newEquipment[F[_]: Sync](e: Equipment, cid: CollectiveId): F[REquipment] = Conversions.timeId.map { case (id, now) => REquipment(id, cid, e.name.trim, now, now, e.notes, e.use) } - def changeEquipment[F[_]: Sync](e: Equipment, cid: Ident): F[REquipment] = + def changeEquipment[F[_]: Sync](e: Equipment, cid: CollectiveId): F[REquipment] = Timestamp .current[F] .map(now => REquipment(e.id, cid, e.name.trim, e.created, now, e.notes, e.use)) @@ -681,6 +687,8 @@ trait Conversions { case UploadResult.NoFiles => BasicResult(false, "There were no files to submit.") case UploadResult.NoSource => BasicResult(false, "The source id is not valid.") case UploadResult.NoItem => BasicResult(false, "The item could not be found.") + case UploadResult.NoCollective => + BasicResult(false, "The collective could not be found.") } def basicResult(cr: PassChangeResult): BasicResult = diff --git a/modules/restserver/src/main/scala/docspell/restserver/routes/AddonArchiveRoutes.scala b/modules/restserver/src/main/scala/docspell/restserver/routes/AddonArchiveRoutes.scala index 72c9b808..6995ebb8 100644 --- 
a/modules/restserver/src/main/scala/docspell/restserver/routes/AddonArchiveRoutes.scala +++ b/modules/restserver/src/main/scala/docspell/restserver/routes/AddonArchiveRoutes.scala @@ -39,7 +39,7 @@ object AddonArchiveRoutes extends AddonValidationSupport { HttpRoutes.of { case GET -> Root => for { - all <- backend.addons.getAllAddons(token.account.collective) + all <- backend.addons.getAllAddons(token.account.collectiveId) resp <- Ok( AddonList( all.map(r => @@ -62,7 +62,7 @@ object AddonArchiveRoutes extends AddonValidationSupport { for { input <- req.as[AddonRegister] install = backend.addons.registerAddon( - token.account.collective, + token.account.collectiveId, input.url, None ) @@ -82,7 +82,7 @@ object AddonArchiveRoutes extends AddonValidationSupport { s"Addon updated: ${m.nameAndVersion}" ) ) - val update = backend.addons.refreshAddon(token.account.collective, id) + val update = backend.addons.refreshAddon(token.account.collectiveId, id) for { resp <- if (sync) @@ -97,7 +97,7 @@ object AddonArchiveRoutes extends AddonValidationSupport { case DELETE -> Root / Ident(id) => for { - flag <- backend.addons.deleteAddon(token.account.collective, id) + flag <- backend.addons.deleteAddon(token.account.collectiveId, id) resp <- if (flag) Ok(BasicResult(true, "Addon deleted")) else NotFound(BasicResult(false, "Addon not found")) diff --git a/modules/restserver/src/main/scala/docspell/restserver/routes/AddonRunConfigRoutes.scala b/modules/restserver/src/main/scala/docspell/restserver/routes/AddonRunConfigRoutes.scala index d146eef1..4f93c9cf 100644 --- a/modules/restserver/src/main/scala/docspell/restserver/routes/AddonRunConfigRoutes.scala +++ b/modules/restserver/src/main/scala/docspell/restserver/routes/AddonRunConfigRoutes.scala @@ -29,7 +29,7 @@ object AddonRunConfigRoutes { HttpRoutes.of { case GET -> Root => for { - all <- backend.addons.getAllAddonRunConfigs(token.account.collective) + all <- backend.addons.getAllAddonRunConfigs(token.account.collectiveId) resp 
<- Ok(AddonRunConfigList(all.map(convertInfoTask))) } yield resp @@ -39,7 +39,7 @@ object AddonRunConfigRoutes { data = convertInsertTask(Ident.unsafe(""), input) res <- data.flatTraverse(in => backend.addons - .upsertAddonRunConfig(token.account.collective, in) + .upsertAddonRunConfig(token.account.collectiveId, in) .map(_.leftMap(_.message)) ) resp <- res.fold( @@ -54,7 +54,7 @@ object AddonRunConfigRoutes { data = convertInsertTask(id, input) res <- data.flatTraverse(in => backend.addons - .upsertAddonRunConfig(token.account.collective, in) + .upsertAddonRunConfig(token.account.collectiveId, in) .map(_.leftMap(_.message)) ) resp <- res.fold( @@ -65,7 +65,7 @@ object AddonRunConfigRoutes { case DELETE -> Root / Ident(id) => for { - flag <- backend.addons.deleteAddonRunConfig(token.account.collective, id) + flag <- backend.addons.deleteAddonRunConfig(token.account.collectiveId, id) resp <- if (flag) Ok(BasicResult(true, "Addon task deleted")) else NotFound(BasicResult(false, "Addon task not found")) diff --git a/modules/restserver/src/main/scala/docspell/restserver/routes/AddonRunRoutes.scala b/modules/restserver/src/main/scala/docspell/restserver/routes/AddonRunRoutes.scala index 3b167b2d..25eb46fc 100644 --- a/modules/restserver/src/main/scala/docspell/restserver/routes/AddonRunRoutes.scala +++ b/modules/restserver/src/main/scala/docspell/restserver/routes/AddonRunRoutes.scala @@ -9,12 +9,11 @@ package docspell.restserver.routes import cats.data.NonEmptyList import cats.effect._ import cats.syntax.all._ - import docspell.backend.BackendApp import docspell.backend.auth.AuthToken import docspell.restapi.model._ import docspell.restserver.http4s.ThrowableResponseMapper - +import docspell.scheduler.usertask.UserTaskScope import org.http4s.HttpRoutes import org.http4s.circe.CirceEntityCodec._ import org.http4s.dsl.Http4sDsl @@ -29,9 +28,10 @@ object AddonRunRoutes { for { input <- req.as[AddonRunExistingItem] _ <- backend.addons.runAddonForItem( - token.account, + 
token.account.collectiveId, NonEmptyList(input.itemId, input.additionalItems), - input.addonRunConfigIds.toSet + input.addonRunConfigIds.toSet, + UserTaskScope(token.account) ) resp <- Ok(BasicResult(true, "Job for running addons submitted.")) } yield resp diff --git a/modules/restserver/src/main/scala/docspell/restserver/routes/AttachmentMultiRoutes.scala b/modules/restserver/src/main/scala/docspell/restserver/routes/AttachmentMultiRoutes.scala index 6a0baa9e..b913b60a 100644 --- a/modules/restserver/src/main/scala/docspell/restserver/routes/AttachmentMultiRoutes.scala +++ b/modules/restserver/src/main/scala/docspell/restserver/routes/AttachmentMultiRoutes.scala @@ -33,7 +33,7 @@ object AttachmentMultiRoutes extends NonEmptyListSupport { for { json <- req.as[IdList] attachments <- requireNonEmpty(json.ids) - n <- backend.item.deleteAttachmentMultiple(attachments, user.account.collective) + n <- backend.item.deleteAttachmentMultiple(attachments, user.account.collectiveId) res = BasicResult( n > 0, if (n > 0) "Attachment(s) deleted" else "Attachment deletion failed." 
diff --git a/modules/restserver/src/main/scala/docspell/restserver/routes/AttachmentRoutes.scala b/modules/restserver/src/main/scala/docspell/restserver/routes/AttachmentRoutes.scala index 9d2264e0..39574043 100644 --- a/modules/restserver/src/main/scala/docspell/restserver/routes/AttachmentRoutes.scala +++ b/modules/restserver/src/main/scala/docspell/restserver/routes/AttachmentRoutes.scala @@ -8,7 +8,6 @@ package docspell.restserver.routes import cats.effect._ import cats.implicits._ - import docspell.backend.BackendApp import docspell.backend.auth.AuthToken import docspell.backend.ops._ @@ -18,7 +17,7 @@ import docspell.restapi.model._ import docspell.restserver.conv.Conversions import docspell.restserver.http4s.BinaryUtil import docspell.restserver.webapp.Webjars - +import docspell.scheduler.usertask.UserTaskScope import org.http4s._ import org.http4s.circe.CirceEntityDecoder._ import org.http4s.circe.CirceEntityEncoder._ @@ -45,19 +44,22 @@ object AttachmentRoutes { HttpRoutes.of { case HEAD -> Root / Ident(id) => for { - fileData <- backend.itemSearch.findAttachment(id, user.account.collective) + fileData <- backend.itemSearch.findAttachment(id, user.account.collectiveId) resp <- BinaryUtil.respondHead(dsl)(fileData) } yield resp case req @ GET -> Root / Ident(id) => for { - fileData <- backend.itemSearch.findAttachment(id, user.account.collective) + fileData <- backend.itemSearch.findAttachment(id, user.account.collectiveId) resp <- BinaryUtil.respond[F](dsl, req)(fileData) } yield resp case HEAD -> Root / Ident(id) / "original" => for { - fileData <- backend.itemSearch.findAttachmentSource(id, user.account.collective) + fileData <- backend.itemSearch.findAttachmentSource( + id, + user.account.collectiveId + ) resp <- fileData .map(data => withResponseHeaders(Ok())(data)) @@ -66,7 +68,10 @@ object AttachmentRoutes { case req @ GET -> Root / Ident(id) / "original" => for { - fileData <- backend.itemSearch.findAttachmentSource(id, user.account.collective) + 
fileData <- backend.itemSearch.findAttachmentSource( + id, + user.account.collectiveId + ) inm = req.headers.get[`If-None-Match`].flatMap(_.tags) matches = BinaryUtil.matchETag(fileData.map(_.meta), inm) resp <- @@ -81,7 +86,7 @@ object AttachmentRoutes { case HEAD -> Root / Ident(id) / "archive" => for { fileData <- - backend.itemSearch.findAttachmentArchive(id, user.account.collective) + backend.itemSearch.findAttachmentArchive(id, user.account.collectiveId) resp <- fileData .map(data => withResponseHeaders(Ok())(data)) @@ -91,7 +96,7 @@ object AttachmentRoutes { case req @ GET -> Root / Ident(id) / "archive" => for { fileData <- - backend.itemSearch.findAttachmentArchive(id, user.account.collective) + backend.itemSearch.findAttachmentArchive(id, user.account.collectiveId) inm = req.headers.get[`If-None-Match`].flatMap(_.tags) matches = BinaryUtil.matchETag(fileData.map(_.meta), inm) resp <- @@ -106,14 +111,14 @@ object AttachmentRoutes { case req @ GET -> Root / Ident(id) / "preview" => for { fileData <- - backend.itemSearch.findAttachmentPreview(id, user.account.collective) + backend.itemSearch.findAttachmentPreview(id, user.account.collectiveId) resp <- BinaryUtil.respondPreview(dsl, req)(fileData) } yield resp case HEAD -> Root / Ident(id) / "preview" => for { fileData <- - backend.itemSearch.findAttachmentPreview(id, user.account.collective) + backend.itemSearch.findAttachmentPreview(id, user.account.collectiveId) resp <- BinaryUtil.respondPreviewHead(dsl)(fileData) } yield resp @@ -121,7 +126,7 @@ object AttachmentRoutes { for { res <- backend.item.generatePreview( MakePreviewArgs.replace(id), - user.account + UserTaskScope(user.account) ) resp <- Ok( Conversions.basicResult(res, "Generating preview image task submitted.") @@ -138,7 +143,7 @@ object AttachmentRoutes { case GET -> Root / Ident(id) / "meta" => for { - rm <- backend.itemSearch.findAttachmentMeta(id, user.account.collective) + rm <- backend.itemSearch.findAttachmentMeta(id, 
user.account.collectiveId) md = rm.map(Conversions.mkAttachmentMeta) resp <- md.map(Ok(_)).getOrElse(NotFound(BasicResult(false, "Not found."))) } yield resp @@ -146,13 +151,13 @@ object AttachmentRoutes { case req @ POST -> Root / Ident(id) / "name" => for { nn <- req.as[OptionalText] - res <- backend.item.setAttachmentName(id, nn.text, user.account.collective) + res <- backend.item.setAttachmentName(id, nn.text, user.account.collectiveId) resp <- Ok(Conversions.basicResult(res, "Name updated.")) } yield resp case DELETE -> Root / Ident(id) => for { - n <- backend.item.deleteAttachment(id, user.account.collective) + n <- backend.item.deleteAttachment(id, user.account.collectiveId) res = if (n == 0) BasicResult(false, "Attachment not found") else BasicResult(true, "Attachment deleted.") @@ -177,10 +182,9 @@ object AttachmentRoutes { case POST -> Root / "convertallpdfs" => for { res <- - backend.item.convertAllPdf(None, None) + backend.item.convertAllPdf(None, UserTaskScope.system) resp <- Ok(Conversions.basicResult(res, "Convert all PDFs task submitted")) } yield resp } } - } diff --git a/modules/restserver/src/main/scala/docspell/restserver/routes/CheckFileRoutes.scala b/modules/restserver/src/main/scala/docspell/restserver/routes/CheckFileRoutes.scala index 1e471b48..e03dfe66 100644 --- a/modules/restserver/src/main/scala/docspell/restserver/routes/CheckFileRoutes.scala +++ b/modules/restserver/src/main/scala/docspell/restserver/routes/CheckFileRoutes.scala @@ -29,7 +29,7 @@ object CheckFileRoutes { HttpRoutes.of { case GET -> Root / checksum => for { items <- - backend.itemSearch.findByFileCollective(checksum, user.account.collective) + backend.itemSearch.findByFileCollective(checksum, user.account.collectiveId) resp <- Ok(convert(items)) } yield resp diff --git a/modules/restserver/src/main/scala/docspell/restserver/routes/ClientSettingsRoutes.scala b/modules/restserver/src/main/scala/docspell/restserver/routes/ClientSettingsRoutes.scala index 
86ea63b4..f259103c 100644 --- a/modules/restserver/src/main/scala/docspell/restserver/routes/ClientSettingsRoutes.scala +++ b/modules/restserver/src/main/scala/docspell/restserver/routes/ClientSettingsRoutes.scala @@ -36,7 +36,7 @@ object ClientSettingsRoutes { _ <- OptionT.liftF(logger.debug(s"Get client settings for share ${token.id}")) share <- backend.share.findActiveById(token.id) sett <- OptionT( - backend.clientSettings.loadCollective(clientId, share.user.accountId) + backend.clientSettings.loadCollective(clientId, share.account.collectiveId) ) res <- OptionT.liftF(Ok(sett.settingsData)) } yield res) @@ -51,7 +51,11 @@ object ClientSettingsRoutes { HttpRoutes.of { case GET -> Root / Ident(clientId) => for { - mergedData <- backend.clientSettings.loadMerged(clientId, user.account) + mergedData <- backend.clientSettings.loadMerged( + clientId, + user.account.collectiveId, + user.account.userId + ) res <- mergedData match { case Some(j) => Ok(j) case None => Ok(Map.empty[String, String]) @@ -61,13 +65,13 @@ object ClientSettingsRoutes { case req @ PUT -> Root / "user" / Ident(clientId) => for { data <- req.as[Json] - _ <- backend.clientSettings.saveUser(clientId, user.account, data) + _ <- backend.clientSettings.saveUser(clientId, user.account.userId, data) res <- Ok(BasicResult(true, "Settings stored")) } yield res case GET -> Root / "user" / Ident(clientId) => for { - data <- backend.clientSettings.loadUser(clientId, user.account) + data <- backend.clientSettings.loadUser(clientId, user.account.userId) res <- data match { case Some(d) => Ok(d.settingsData) case None => Ok(Map.empty[String, String]) @@ -76,7 +80,7 @@ object ClientSettingsRoutes { case DELETE -> Root / "user" / Ident(clientId) => for { - flag <- backend.clientSettings.deleteUser(clientId, user.account) + flag <- backend.clientSettings.deleteUser(clientId, user.account.userId) res <- Ok( BasicResult( flag, @@ -88,13 +92,20 @@ object ClientSettingsRoutes { case req @ PUT -> Root / "collective" / 
Ident(clientId) => for { data <- req.as[Json] - _ <- backend.clientSettings.saveCollective(clientId, user.account, data) + _ <- backend.clientSettings.saveCollective( + clientId, + user.account.collectiveId, + data + ) res <- Ok(BasicResult(true, "Settings stored")) } yield res case GET -> Root / "collective" / Ident(clientId) => for { - data <- backend.clientSettings.loadCollective(clientId, user.account) + data <- backend.clientSettings.loadCollective( + clientId, + user.account.collectiveId + ) res <- data match { case Some(d) => Ok(d.settingsData) case None => Ok(Map.empty[String, String]) @@ -103,7 +114,10 @@ object ClientSettingsRoutes { case DELETE -> Root / "collective" / Ident(clientId) => for { - flag <- backend.clientSettings.deleteCollective(clientId, user.account) + flag <- backend.clientSettings.deleteCollective( + clientId, + user.account.collectiveId + ) res <- Ok( BasicResult( flag, diff --git a/modules/restserver/src/main/scala/docspell/restserver/routes/CollectiveRoutes.scala b/modules/restserver/src/main/scala/docspell/restserver/routes/CollectiveRoutes.scala index 8e4cc6e6..faedbac0 100644 --- a/modules/restserver/src/main/scala/docspell/restserver/routes/CollectiveRoutes.scala +++ b/modules/restserver/src/main/scala/docspell/restserver/routes/CollectiveRoutes.scala @@ -33,19 +33,19 @@ object CollectiveRoutes { case GET -> Root => for { collDb <- backend.collective.find(user.account.collective) - coll = collDb.map(c => Collective(c.id, c.state, c.created)) + coll = collDb.map(c => Collective(c.name, c.state, c.created)) resp <- coll.toResponse() } yield resp case GET -> Root / "insights" => for { - ins <- backend.collective.insights(user.account.collective) + ins <- backend.collective.insights(user.account.collectiveId) resp <- Ok(Conversions.mkItemInsights(ins)) } yield resp case GET -> Root / "tagcloud" => for { - cloud <- backend.collective.tagCloud(user.account.collective) + cloud <- backend.collective.tagCloud(user.account.collectiveId) 
resp <- Ok(Conversions.mkTagCloud(cloud)) } yield resp @@ -73,13 +73,13 @@ object CollectiveRoutes { ) res <- backend.collective - .updateSettings(user.account.collective, sett) + .updateSettings(user.account.collectiveId, sett) resp <- Ok(Conversions.basicResult(res, "Settings updated.")) } yield resp case GET -> Root / "settings" => for { - settDb <- backend.collective.findSettings(user.account.collective) + settDb <- backend.collective.findSettings(user.account.collectiveId) trash = settDb.flatMap(_.emptyTrash).getOrElse(OCollective.EmptyTrash.default) sett = settDb.map(c => CollectiveSettings( @@ -108,8 +108,8 @@ object CollectiveRoutes { for { res <- backend.collective - .getContacts(user.account.collective, q.map(_.q), kind) - .take(50) + .getContacts(user.account.collectiveId, q.map(_.q), kind) + .take(100) .compile .toList resp <- Ok(ContactList(res.map(Conversions.mkContact))) @@ -117,7 +117,7 @@ object CollectiveRoutes { case POST -> Root / "classifier" / "startonce" => for { - _ <- backend.collective.startLearnClassifier(user.account.collective) + _ <- backend.collective.startLearnClassifier(user.account.collectiveId) resp <- Ok(BasicResult(true, "Task submitted")) } yield resp @@ -125,11 +125,10 @@ object CollectiveRoutes { for { data <- req.as[EmptyTrashSetting] _ <- backend.collective.startEmptyTrash( - EmptyTrashArgs(user.account.collective, data.minAge) + EmptyTrashArgs(user.account.collectiveId, data.minAge) ) resp <- Ok(BasicResult(true, "Task submitted")) } yield resp } } - } diff --git a/modules/restserver/src/main/scala/docspell/restserver/routes/CustomFieldRoutes.scala b/modules/restserver/src/main/scala/docspell/restserver/routes/CustomFieldRoutes.scala index 2a32b0be..83492974 100644 --- a/modules/restserver/src/main/scala/docspell/restserver/routes/CustomFieldRoutes.scala +++ b/modules/restserver/src/main/scala/docspell/restserver/routes/CustomFieldRoutes.scala @@ -38,7 +38,7 @@ object CustomFieldRoutes { val order = 
sort.getOrElse(CustomFieldOrder.NameAsc) for { fs <- backend.customFields.findAll( - user.account.collective, + user.account.collectiveId, param.map(_.q), order ) @@ -54,7 +54,7 @@ object CustomFieldRoutes { case GET -> Root / Ident(id) => (for { - field <- OptionT(backend.customFields.findById(user.account.collective, id)) + field <- OptionT(backend.customFields.findById(user.account.collectiveId, id)) res <- OptionT.liftF(Ok(convertField(field))) } yield res).getOrElseF(NotFound(BasicResult(false, "Not found"))) @@ -67,7 +67,7 @@ object CustomFieldRoutes { case DELETE -> Root / Ident(id) => for { - res <- backend.customFields.delete(user.account.collective, id) + res <- backend.customFields.delete(user.account.collectiveId, id) resp <- Ok(convertResult(res)) } yield resp } @@ -88,7 +88,7 @@ object CustomFieldRoutes { id, in.name, in.label, - user.account.collective, + user.account.collectiveId, in.ftype, Timestamp.Epoch ) @@ -101,7 +101,7 @@ object CustomFieldRoutes { in.name, in.label, in.ftype, - user.account.collective + user.account.collectiveId ) private def convertField(f: CustomFieldData): CustomField = diff --git a/modules/restserver/src/main/scala/docspell/restserver/routes/DownloadAllRoutes.scala b/modules/restserver/src/main/scala/docspell/restserver/routes/DownloadAllRoutes.scala index 3d549e08..65cf2b7c 100644 --- a/modules/restserver/src/main/scala/docspell/restserver/routes/DownloadAllRoutes.scala +++ b/modules/restserver/src/main/scala/docspell/restserver/routes/DownloadAllRoutes.scala @@ -75,7 +75,7 @@ object DownloadAllRoutes { case req @ GET -> Root / "file" / Ident(id) => for { - data <- backend.downloadAll.getFile(share.account.collective, id) + data <- backend.downloadAll.getFile(share.account.collectiveId, id) resp <- BinaryUtil.respond(dsl, req)(data) } yield resp } @@ -113,13 +113,13 @@ object DownloadAllRoutes { case req @ GET -> Root / "file" / Ident(id) => for { - data <- backend.downloadAll.getFile(token.account.collective, id) + data 
<- backend.downloadAll.getFile(token.account.collectiveId, id) resp <- BinaryUtil.respond(dsl, req)(data) } yield resp case HEAD -> Root / "file" / Ident(id) => for { - data <- backend.downloadAll.getFile(token.account.collective, id) + data <- backend.downloadAll.getFile(token.account.collectiveId, id) resp <- BinaryUtil.respondHead(dsl)(data) } yield resp @@ -131,7 +131,7 @@ object DownloadAllRoutes { case PUT -> Root / "cancel" / Ident(id) => for { - res <- backend.downloadAll.cancelDownload(token.account, id) + res <- backend.downloadAll.cancelDownload(token.account.collectiveId, id) resp <- Ok(Conversions.basicResult(res)) } yield resp } diff --git a/modules/restserver/src/main/scala/docspell/restserver/routes/EquipmentRoutes.scala b/modules/restserver/src/main/scala/docspell/restserver/routes/EquipmentRoutes.scala index ce9e3217..b2102a91 100644 --- a/modules/restserver/src/main/scala/docspell/restserver/routes/EquipmentRoutes.scala +++ b/modules/restserver/src/main/scala/docspell/restserver/routes/EquipmentRoutes.scala @@ -33,7 +33,7 @@ object EquipmentRoutes { case GET -> Root :? QueryParam.QueryOpt(q) :? 
QueryParam.EquipSort(sort) => for { data <- backend.equipment.findAll( - user.account, + user.account.collectiveId, q.map(_.q), sort.getOrElse(OEquipment.EquipmentOrder.NameAsc) ) @@ -43,7 +43,7 @@ object EquipmentRoutes { case req @ POST -> Root => for { data <- req.as[Equipment] - equip <- newEquipment(data, user.account.collective) + equip <- newEquipment(data, user.account.collectiveId) res <- backend.equipment.add(equip) resp <- Ok(basicResult(res, "Equipment created")) } yield resp @@ -51,20 +51,20 @@ object EquipmentRoutes { case req @ PUT -> Root => for { data <- req.as[Equipment] - equip <- changeEquipment(data, user.account.collective) + equip <- changeEquipment(data, user.account.collectiveId) res <- backend.equipment.update(equip) resp <- Ok(basicResult(res, "Equipment updated.")) } yield resp case DELETE -> Root / Ident(id) => for { - del <- backend.equipment.delete(id, user.account.collective) + del <- backend.equipment.delete(id, user.account.collectiveId) resp <- Ok(basicResult(del, "Equipment deleted.")) } yield resp case GET -> Root / Ident(id) => (for { - equip <- OptionT(backend.equipment.find(user.account, id)) + equip <- OptionT(backend.equipment.find(user.account.collectiveId, id)) resp <- OptionT.liftF(Ok(mkEquipment(equip))) } yield resp).getOrElseF(NotFound()) } diff --git a/modules/restserver/src/main/scala/docspell/restserver/routes/FolderRoutes.scala b/modules/restserver/src/main/scala/docspell/restserver/routes/FolderRoutes.scala index b177f3c8..463f20e1 100644 --- a/modules/restserver/src/main/scala/docspell/restserver/routes/FolderRoutes.scala +++ b/modules/restserver/src/main/scala/docspell/restserver/routes/FolderRoutes.scala @@ -35,9 +35,15 @@ object FolderRoutes { QueryParam.OwningOpt(owning) +& QueryParam.FolderSort(sort) => val order = sort.getOrElse(OFolder.FolderOrder.NameAsc) val login = - owning.filter(identity).map(_ => user.account.user) + owning.filter(identity).map(_ => user.account.login) for { - all <- 
backend.folder.findAll(user.account, login, q.map(_.q), order) + all <- backend.folder.findAll( + user.account.collectiveId, + user.account.userId, + login, + q.map(_.q), + order + ) resp <- Ok(FolderList(all.map(mkFolder).toList)) } yield resp @@ -45,46 +51,48 @@ object FolderRoutes { for { data <- req.as[NewFolder] nfolder <- newFolder(data, user.account) - res <- backend.folder.add(nfolder, Some(user.account.user)) + res <- backend.folder.add(nfolder, None) resp <- Ok(Conversions.idResult(res, nfolder.id, "Folder successfully created.")) } yield resp case GET -> Root / Ident(id) => (for { - folder <- OptionT(backend.folder.findById(id, user.account)) + folder <- OptionT( + backend.folder.findById(id, user.account.collectiveId, user.account.userId) + ) resp <- OptionT.liftF(Ok(mkFolderDetail(folder))) } yield resp).getOrElseF(NotFound()) case req @ PUT -> Root / Ident(id) => for { data <- req.as[NewFolder] - res <- backend.folder.changeName(id, user.account, data.name) + res <- backend.folder.changeName(id, user.account.userId, data.name) resp <- Ok(mkFolderChangeResult(res)) } yield resp case DELETE -> Root / Ident(id) => for { - res <- backend.folder.delete(id, user.account) + res <- backend.folder.delete(id, user.account.userId) resp <- Ok(mkFolderChangeResult(res)) } yield resp case PUT -> Root / Ident(id) / "member" / Ident(userId) => for { - res <- backend.folder.addMember(id, user.account, userId) + res <- backend.folder.addMember(id, user.account.userId, userId) resp <- Ok(mkFolderChangeResult(res)) } yield resp case DELETE -> Root / Ident(id) / "member" / Ident(userId) => for { - res <- backend.folder.removeMember(id, user.account, userId) + res <- backend.folder.removeMember(id, user.account.userId, userId) resp <- Ok(mkFolderChangeResult(res)) } yield resp } } - private def newFolder[F[_]: Sync](ns: NewFolder, account: AccountId): F[RFolder] = - RFolder.newFolder(ns.name, account) + private def newFolder[F[_]: Sync](ns: NewFolder, account: 
AccountInfo): F[RFolder] = + RFolder.newFolder(ns.name, account.collectiveId, account.userId) private def mkFolder(item: OFolder.FolderItem): FolderItem = FolderItem( diff --git a/modules/restserver/src/main/scala/docspell/restserver/routes/FullTextIndexRoutes.scala b/modules/restserver/src/main/scala/docspell/restserver/routes/FullTextIndexRoutes.scala index 1a512693..4933f777 100644 --- a/modules/restserver/src/main/scala/docspell/restserver/routes/FullTextIndexRoutes.scala +++ b/modules/restserver/src/main/scala/docspell/restserver/routes/FullTextIndexRoutes.scala @@ -33,7 +33,9 @@ object FullTextIndexRoutes { HttpRoutes.of { case POST -> Root / "reIndex" => for { - res <- backend.fulltext.reindexCollective(user.account).attempt + res <- backend.fulltext + .reindexCollective(user.account.collectiveId, user.account.userId.some) + .attempt resp <- Ok(Conversions.basicResult(res, "Full-text index will be re-created.")) } yield resp } diff --git a/modules/restserver/src/main/scala/docspell/restserver/routes/IntegrationEndpointRoutes.scala b/modules/restserver/src/main/scala/docspell/restserver/routes/IntegrationEndpointRoutes.scala index 3777f18a..f95588b3 100644 --- a/modules/restserver/src/main/scala/docspell/restserver/routes/IntegrationEndpointRoutes.scala +++ b/modules/restserver/src/main/scala/docspell/restserver/routes/IntegrationEndpointRoutes.scala @@ -9,14 +9,12 @@ package docspell.restserver.routes import cats.data.{EitherT, OptionT} import cats.effect._ import cats.implicits._ - import docspell.backend.BackendApp import docspell.common._ import docspell.restserver.Config import docspell.restserver.conv.Conversions._ import docspell.restserver.http4s.Responses -import docspell.store.records.RItem - +import docspell.store.records.{RCollective, RItem} import org.http4s._ import org.http4s.circe.CirceEntityEncoder._ import org.http4s.dsl.Http4sDsl @@ -36,15 +34,15 @@ object IntegrationEndpointRoutes { for { _ <- authRequest(req, cfg.integrationEndpoint) _ <- 
checkEnabled(cfg.integrationEndpoint) - _ <- lookupCollective(collective, backend) - } yield () + c <- lookupCollective(collective, backend) + } yield c HttpRoutes.of { case req @ POST -> Root / "item" / Ident(collective) => (for { - _ <- validate(req, collective) + coll <- validate(req, collective) res <- EitherT.liftF[F, Response[F], Response[F]]( - uploadFile(collective, backend, cfg, dsl)(req) + uploadFile(coll.id, backend, cfg, dsl)(req) ) } yield res).fold(identity, identity) @@ -56,9 +54,9 @@ object IntegrationEndpointRoutes { case req @ GET -> Root / "checkfile" / Ident(collective) / checksum => (for { - _ <- validate(req, collective) + coll <- validate(req, collective) items <- EitherT.liftF[F, Response[F], Vector[RItem]]( - backend.itemSearch.findByFileCollective(checksum, collective) + backend.itemSearch.findByFileCollective(checksum, coll.id) ) resp <- EitherT.liftF[F, Response[F], Response[F]](Ok(CheckFileRoutes.convert(items))) @@ -86,14 +84,13 @@ object IntegrationEndpointRoutes { def lookupCollective[F[_]: Async]( coll: Ident, backend: BackendApp[F] - ): EitherT[F, Response[F], Unit] = - for { - opt <- EitherT.liftF(backend.collective.find(coll)) - res <- EitherT.cond[F](opt.exists(_.integrationEnabled), (), Response.notFound[F]) - } yield res + ): EitherT[F, Response[F], RCollective] = + OptionT(backend.collective.find(coll)) + .filter(_.integrationEnabled) + .toRight(Response.notFound[F]) def uploadFile[F[_]: Async]( - coll: Ident, + cid: CollectiveId, backend: BackendApp[F], cfg: Config, dsl: Http4sDsl[F] @@ -110,8 +107,7 @@ object IntegrationEndpointRoutes { cfg.integrationEndpoint.priority, cfg.backend.files.validMimeTypes ) - account = AccountId(coll, DocspellSystem.user) - result <- backend.upload.submit(updata, account, None) + result <- backend.upload.submit(updata, cid, None, None) res <- Ok(basicResult(result)) } yield res } diff --git a/modules/restserver/src/main/scala/docspell/restserver/routes/ItemLinkRoutes.scala 
b/modules/restserver/src/main/scala/docspell/restserver/routes/ItemLinkRoutes.scala index 2469c99e..097f162e 100644 --- a/modules/restserver/src/main/scala/docspell/restserver/routes/ItemLinkRoutes.scala +++ b/modules/restserver/src/main/scala/docspell/restserver/routes/ItemLinkRoutes.scala @@ -22,7 +22,7 @@ import org.http4s.HttpRoutes import org.http4s.circe.CirceEntityCodec._ import org.http4s.dsl.Http4sDsl -class ItemLinkRoutes[F[_]: Async](account: AccountId, backend: OItemLink[F]) +class ItemLinkRoutes[F[_]: Async](account: AccountInfo, backend: OItemLink[F]) extends Http4sDsl[F] { def get: HttpRoutes[F] = HttpRoutes.of { @@ -36,7 +36,7 @@ class ItemLinkRoutes[F[_]: Async](account: AccountId, backend: OItemLink[F]) case DELETE -> Root / Ident(target) / Ident(id) => for { - _ <- backend.removeAll(account.collective, target, NonEmptyList.of(id)) + _ <- backend.removeAll(account.collectiveId, target, NonEmptyList.of(id)) resp <- Ok(BasicResult(true, "Related items removed")) } yield resp @@ -46,7 +46,7 @@ class ItemLinkRoutes[F[_]: Async](account: AccountId, backend: OItemLink[F]) related = NonEmptyList.fromList(input.related) res <- OptionT .fromOption[F](related) - .semiflatMap(backend.addAll(account.collective, input.item, _)) + .semiflatMap(backend.addAll(account.collectiveId, input.item, _)) .value resp <- Ok(convertResult(res)) } yield resp @@ -56,7 +56,7 @@ class ItemLinkRoutes[F[_]: Async](account: AccountId, backend: OItemLink[F]) input <- req.as[ItemLinkData] related = NonEmptyList.fromList(input.related) _ <- related - .map(backend.removeAll(account.collective, input.item, _)) + .map(backend.removeAll(account.collectiveId, input.item, _)) .getOrElse( BadRequest(BasicResult(false, "List of related items must not be empty")) ) @@ -77,6 +77,6 @@ class ItemLinkRoutes[F[_]: Async](account: AccountId, backend: OItemLink[F]) object ItemLinkRoutes { - def apply[F[_]: Async](account: AccountId, itemLink: OItemLink[F]): HttpRoutes[F] = + def apply[F[_]: 
Async](account: AccountInfo, itemLink: OItemLink[F]): HttpRoutes[F] = new ItemLinkRoutes[F](account, itemLink).get } diff --git a/modules/restserver/src/main/scala/docspell/restserver/routes/ItemMultiRoutes.scala b/modules/restserver/src/main/scala/docspell/restserver/routes/ItemMultiRoutes.scala index 1a8ebd6a..cc65ac0c 100644 --- a/modules/restserver/src/main/scala/docspell/restserver/routes/ItemMultiRoutes.scala +++ b/modules/restserver/src/main/scala/docspell/restserver/routes/ItemMultiRoutes.scala @@ -8,7 +8,6 @@ package docspell.restserver.routes import cats.effect._ import cats.implicits._ - import docspell.backend.BackendApp import docspell.backend.auth.AuthToken import docspell.backend.ops.OCustomFields.{RemoveValue, SetValue} @@ -17,7 +16,7 @@ import docspell.restapi.model._ import docspell.restserver.Config import docspell.restserver.conv.{Conversions, MultiIdSupport, NonEmptyListSupport} import docspell.restserver.http4s.ClientRequestInfo - +import docspell.scheduler.usertask.UserTaskScope import org.http4s.HttpRoutes import org.http4s.circe.CirceEntityDecoder._ import org.http4s.circe.CirceEntityEncoder._ @@ -42,7 +41,7 @@ object ItemMultiRoutes extends NonEmptyListSupport with MultiIdSupport { res <- backend.item.setStates( data, ItemState.Confirmed, - user.account.collective + user.account.collectiveId ) resp <- Ok(Conversions.basicResult(res, "Item data confirmed")) } yield resp @@ -54,7 +53,7 @@ object ItemMultiRoutes extends NonEmptyListSupport with MultiIdSupport { res <- backend.item.setStates( data, ItemState.Created, - user.account.collective + user.account.collectiveId ) resp <- Ok(Conversions.basicResult(res, "Item back to created.")) } yield resp @@ -66,7 +65,7 @@ object ItemMultiRoutes extends NonEmptyListSupport with MultiIdSupport { res <- backend.item.setTagsMultipleItems( items, json.refs, - user.account.collective + user.account.collectiveId ) baseUrl = ClientRequestInfo.getBaseUrl(cfg, req) _ <- 
backend.notification.offerEvents(res.event(user.account, baseUrl.some)) @@ -80,7 +79,7 @@ object ItemMultiRoutes extends NonEmptyListSupport with MultiIdSupport { res <- backend.item.linkTagsMultipleItems( items, json.refs, - user.account.collective + user.account.collectiveId ) baseUrl = ClientRequestInfo.getBaseUrl(cfg, req) _ <- backend.notification.offerEvents(res.event(user.account, baseUrl.some)) @@ -94,7 +93,7 @@ object ItemMultiRoutes extends NonEmptyListSupport with MultiIdSupport { res <- backend.item.removeTagsMultipleItems( items, json.refs, - user.account.collective + user.account.collectiveId ) baseUrl = ClientRequestInfo.getBaseUrl(cfg, req) _ <- backend.notification.offerEvents(res.event(user.account, baseUrl.some)) @@ -108,7 +107,7 @@ object ItemMultiRoutes extends NonEmptyListSupport with MultiIdSupport { res <- backend.item.setNameMultiple( items, json.name.notEmpty.getOrElse(""), - user.account.collective + user.account.collectiveId ) resp <- Ok(Conversions.basicResult(res, "Name updated")) } yield resp @@ -120,7 +119,7 @@ object ItemMultiRoutes extends NonEmptyListSupport with MultiIdSupport { res <- backend.item.setFolderMultiple( items, json.ref.map(_.id), - user.account.collective + user.account.collectiveId ) resp <- Ok(Conversions.basicResult(res, "Folder updated")) } yield resp @@ -129,7 +128,11 @@ object ItemMultiRoutes extends NonEmptyListSupport with MultiIdSupport { for { json <- req.as[ItemsAndDirection] items <- requireNonEmpty(json.items) - res <- backend.item.setDirection(items, json.direction, user.account.collective) + res <- backend.item.setDirection( + items, + json.direction, + user.account.collectiveId + ) resp <- Ok(Conversions.basicResult(res, "Direction updated")) } yield resp @@ -137,7 +140,7 @@ object ItemMultiRoutes extends NonEmptyListSupport with MultiIdSupport { for { json <- req.as[ItemsAndDate] items <- requireNonEmpty(json.items) - res <- backend.item.setItemDate(items, json.date, user.account.collective) + res 
<- backend.item.setItemDate(items, json.date, user.account.collectiveId) resp <- Ok(Conversions.basicResult(res, "Item date updated")) } yield resp @@ -145,7 +148,7 @@ object ItemMultiRoutes extends NonEmptyListSupport with MultiIdSupport { for { json <- req.as[ItemsAndDate] items <- requireNonEmpty(json.items) - res <- backend.item.setItemDueDate(items, json.date, user.account.collective) + res <- backend.item.setItemDueDate(items, json.date, user.account.collectiveId) resp <- Ok(Conversions.basicResult(res, "Item due date updated")) } yield resp @@ -153,7 +156,7 @@ object ItemMultiRoutes extends NonEmptyListSupport with MultiIdSupport { for { json <- req.as[ItemsAndRef] items <- requireNonEmpty(json.items) - res <- backend.item.setCorrOrg(items, json.ref, user.account.collective) + res <- backend.item.setCorrOrg(items, json.ref, user.account.collectiveId) resp <- Ok(Conversions.basicResult(res, "Correspondent organization updated")) } yield resp @@ -161,7 +164,7 @@ object ItemMultiRoutes extends NonEmptyListSupport with MultiIdSupport { for { json <- req.as[ItemsAndRef] items <- requireNonEmpty(json.items) - res <- backend.item.setCorrPerson(items, json.ref, user.account.collective) + res <- backend.item.setCorrPerson(items, json.ref, user.account.collectiveId) resp <- Ok(Conversions.basicResult(res, "Correspondent person updated")) } yield resp @@ -169,7 +172,7 @@ object ItemMultiRoutes extends NonEmptyListSupport with MultiIdSupport { for { json <- req.as[ItemsAndRef] items <- requireNonEmpty(json.items) - res <- backend.item.setConcPerson(items, json.ref, user.account.collective) + res <- backend.item.setConcPerson(items, json.ref, user.account.collectiveId) resp <- Ok(Conversions.basicResult(res, "Concerned person updated")) } yield resp @@ -177,7 +180,7 @@ object ItemMultiRoutes extends NonEmptyListSupport with MultiIdSupport { for { json <- req.as[ItemsAndRef] items <- requireNonEmpty(json.items) - res <- backend.item.setConcEquip(items, json.ref, 
user.account.collective) + res <- backend.item.setConcEquip(items, json.ref, user.account.collectiveId) resp <- Ok(Conversions.basicResult(res, "Concerned equipment updated")) } yield resp @@ -185,7 +188,11 @@ object ItemMultiRoutes extends NonEmptyListSupport with MultiIdSupport { for { json <- req.as[IdList] items <- requireNonEmpty(json.ids) - res <- backend.item.reprocessAll(items, user.account) + res <- backend.item.reprocessAll( + user.account.collectiveId, + items, + UserTaskScope(user.account) + ) resp <- Ok(Conversions.basicResult(res, "Re-process task(s) submitted.")) } yield resp @@ -193,7 +200,7 @@ object ItemMultiRoutes extends NonEmptyListSupport with MultiIdSupport { for { json <- req.as[IdList] items <- requireNonEmpty(json.ids) - n <- backend.item.setDeletedState(items, user.account.collective) + n <- backend.item.setDeletedState(items, user.account.collectiveId) res = BasicResult( n > 0, if (n > 0) "Item(s) deleted" else "Item deletion failed." @@ -205,7 +212,7 @@ object ItemMultiRoutes extends NonEmptyListSupport with MultiIdSupport { for { json <- req.as[IdList] items <- requireNonEmpty(json.ids) - res <- backend.item.restore(items, user.account.collective) + res <- backend.item.restore(items, user.account.collectiveId) resp <- Ok(Conversions.basicResult(res, "Item(s) deleted")) } yield resp @@ -215,7 +222,7 @@ object ItemMultiRoutes extends NonEmptyListSupport with MultiIdSupport { items <- requireNonEmpty(json.items) res <- backend.customFields.setValueMultiple( items, - SetValue(json.field.field, json.field.value, user.account.collective) + SetValue(json.field.field, json.field.value, user.account.collectiveId) ) baseUrl = ClientRequestInfo.getBaseUrl(cfg, req) _ <- backend.notification.offerEvents(res.event(user.account, baseUrl.some)) @@ -228,7 +235,7 @@ object ItemMultiRoutes extends NonEmptyListSupport with MultiIdSupport { items <- requireNonEmpty(json.items) field <- readId[F](json.name) res <- backend.customFields.deleteValue( - 
RemoveValue(field, items, user.account.collective) + RemoveValue(field, items, user.account.collectiveId) ) baseUrl = ClientRequestInfo.getBaseUrl(cfg, req) _ <- backend.notification.offerEvents(res.event(user.account, baseUrl.some)) @@ -239,7 +246,7 @@ object ItemMultiRoutes extends NonEmptyListSupport with MultiIdSupport { for { json <- req.as[IdList] items <- requireNonEmpty(json.ids) - res <- backend.item.merge(logger, items, user.account.collective) + res <- backend.item.merge(logger, items, user.account.collectiveId) resp <- Ok(Conversions.basicResult(res, "Items merged")) } yield resp } diff --git a/modules/restserver/src/main/scala/docspell/restserver/routes/ItemRoutes.scala b/modules/restserver/src/main/scala/docspell/restserver/routes/ItemRoutes.scala index 14dd23c9..57db1d9e 100644 --- a/modules/restserver/src/main/scala/docspell/restserver/routes/ItemRoutes.scala +++ b/modules/restserver/src/main/scala/docspell/restserver/routes/ItemRoutes.scala @@ -9,7 +9,6 @@ package docspell.restserver.routes import cats.data.NonEmptyList import cats.effect._ import cats.implicits._ - import docspell.backend.BackendApp import docspell.backend.auth.AuthToken import docspell.backend.ops.OCustomFields.{RemoveValue, SetValue} @@ -21,7 +20,7 @@ import docspell.restserver.http4s.BinaryUtil import docspell.restserver.http4s.ClientRequestInfo import docspell.restserver.http4s.Responses import docspell.restserver.http4s.{QueryParam => QP} - +import docspell.scheduler.usertask.UserTaskScope import org.http4s.HttpRoutes import org.http4s.circe.CirceEntityDecoder._ import org.http4s.circe.CirceEntityEncoder._ @@ -43,7 +42,7 @@ object ItemRoutes { HttpRoutes.of { case GET -> Root / Ident(id) => for { - item <- backend.itemSearch.findItem(id, user.account.collective) + item <- backend.itemSearch.findItem(id, user.account.collectiveId) result = item.map(Conversions.mkItemDetail) resp <- result @@ -53,26 +52,30 @@ object ItemRoutes { case POST -> Root / Ident(id) / "confirm" => for 
{ - res <- backend.item.setState(id, ItemState.Confirmed, user.account.collective) + res <- backend.item.setState( + id, + ItemState.Confirmed, + user.account.collectiveId + ) resp <- Ok(Conversions.basicResult(res, "Item data confirmed")) } yield resp case POST -> Root / Ident(id) / "unconfirm" => for { - res <- backend.item.setState(id, ItemState.Created, user.account.collective) + res <- backend.item.setState(id, ItemState.Created, user.account.collectiveId) resp <- Ok(Conversions.basicResult(res, "Item back to created.")) } yield resp case POST -> Root / Ident(id) / "restore" => for { - res <- backend.item.restore(NonEmptyList.of(id), user.account.collective) + res <- backend.item.restore(NonEmptyList.of(id), user.account.collectiveId) resp <- Ok(Conversions.basicResult(res, "Item restored.")) } yield resp case req @ PUT -> Root / Ident(id) / "tags" => for { tags <- req.as[StringList].map(_.items) - res <- backend.item.setTags(id, tags, user.account.collective) + res <- backend.item.setTags(id, tags, user.account.collectiveId) baseUrl = ClientRequestInfo.getBaseUrl(cfg, req) _ <- backend.notification.offerEvents(res.event(user.account, baseUrl.some)) resp <- Ok(Conversions.basicResult(res.value, "Tags updated")) @@ -81,8 +84,8 @@ object ItemRoutes { case req @ POST -> Root / Ident(id) / "tags" => for { data <- req.as[Tag] - rtag <- Conversions.newTag(data, user.account.collective) - res <- backend.item.addNewTag(user.account.collective, id, rtag) + rtag <- Conversions.newTag(data, user.account.collectiveId) + res <- backend.item.addNewTag(user.account.collectiveId, id, rtag) baseUrl = ClientRequestInfo.getBaseUrl(cfg, req) _ <- backend.notification.offerEvents(res.event(user.account, baseUrl.some)) resp <- Ok(Conversions.basicResult(res.value, "Tag added.")) @@ -91,7 +94,7 @@ object ItemRoutes { case req @ PUT -> Root / Ident(id) / "taglink" => for { tags <- req.as[StringList] - res <- backend.item.linkTags(id, tags.items, user.account.collective) + res <- 
backend.item.linkTags(id, tags.items, user.account.collectiveId) baseUrl = ClientRequestInfo.getBaseUrl(cfg, req) _ <- backend.notification.offerEvents(res.event(user.account, baseUrl.some)) resp <- Ok(Conversions.basicResult(res.value, "Tags linked")) @@ -100,7 +103,7 @@ object ItemRoutes { case req @ POST -> Root / Ident(id) / "tagtoggle" => for { tags <- req.as[StringList] - res <- backend.item.toggleTags(id, tags.items, user.account.collective) + res <- backend.item.toggleTags(id, tags.items, user.account.collectiveId) baseUrl = ClientRequestInfo.getBaseUrl(cfg, req) _ <- backend.notification.offerEvents(res.event(user.account, baseUrl.some)) resp <- Ok(Conversions.basicResult(res.value, "Tags linked")) @@ -112,7 +115,7 @@ object ItemRoutes { res <- backend.item.removeTagsMultipleItems( NonEmptyList.of(id), json.items, - user.account.collective + user.account.collectiveId ) baseUrl = ClientRequestInfo.getBaseUrl(cfg, req) _ <- backend.notification.offerEvents(res.event(user.account, baseUrl.some)) @@ -125,7 +128,7 @@ object ItemRoutes { res <- backend.item.setDirection( NonEmptyList.of(id), dir.direction, - user.account.collective + user.account.collectiveId ) resp <- Ok(Conversions.basicResult(res, "Direction updated")) } yield resp @@ -133,7 +136,11 @@ object ItemRoutes { case req @ PUT -> Root / Ident(id) / "folder" => for { idref <- req.as[OptionalId] - res <- backend.item.setFolder(id, idref.id.map(_.id), user.account.collective) + res <- backend.item.setFolder( + id, + idref.id.map(_.id), + user.account.collectiveId + ) resp <- Ok(Conversions.basicResult(res, "Folder updated")) } yield resp @@ -143,7 +150,7 @@ object ItemRoutes { res <- backend.item.setCorrOrg( NonEmptyList.of(id), idref.id, - user.account.collective + user.account.collectiveId ) resp <- Ok(Conversions.basicResult(res, "Correspondent organization updated")) } yield resp @@ -151,7 +158,7 @@ object ItemRoutes { case req @ POST -> Root / Ident(id) / "corrOrg" => for { data <- 
req.as[Organization] - org <- Conversions.newOrg(data, user.account.collective) + org <- Conversions.newOrg(data, user.account.collectiveId) res <- backend.item.addCorrOrg(id, org) resp <- Ok(Conversions.basicResult(res, "Correspondent organization updated")) } yield resp @@ -162,7 +169,7 @@ object ItemRoutes { res <- backend.item.setCorrPerson( NonEmptyList.of(id), idref.id, - user.account.collective + user.account.collectiveId ) resp <- Ok(Conversions.basicResult(res, "Correspondent person updated")) } yield resp @@ -170,7 +177,7 @@ object ItemRoutes { case req @ POST -> Root / Ident(id) / "corrPerson" => for { data <- req.as[Person] - pers <- Conversions.newPerson(data, user.account.collective) + pers <- Conversions.newPerson(data, user.account.collectiveId) res <- backend.item.addCorrPerson(id, pers) resp <- Ok(Conversions.basicResult(res, "Correspondent person updated")) } yield resp @@ -181,7 +188,7 @@ object ItemRoutes { res <- backend.item.setConcPerson( NonEmptyList.of(id), idref.id, - user.account.collective + user.account.collectiveId ) resp <- Ok(Conversions.basicResult(res, "Concerned person updated")) } yield resp @@ -189,7 +196,7 @@ object ItemRoutes { case req @ POST -> Root / Ident(id) / "concPerson" => for { data <- req.as[Person] - pers <- Conversions.newPerson(data, user.account.collective) + pers <- Conversions.newPerson(data, user.account.collectiveId) res <- backend.item.addConcPerson(id, pers) resp <- Ok(Conversions.basicResult(res, "Concerned person updated")) } yield resp @@ -200,7 +207,7 @@ object ItemRoutes { res <- backend.item.setConcEquip( NonEmptyList.of(id), idref.id, - user.account.collective + user.account.collectiveId ) resp <- Ok(Conversions.basicResult(res, "Concerned equipment updated")) } yield resp @@ -208,7 +215,7 @@ object ItemRoutes { case req @ POST -> Root / Ident(id) / "concEquipment" => for { data <- req.as[Equipment] - equip <- Conversions.newEquipment(data, user.account.collective) + equip <- 
Conversions.newEquipment(data, user.account.collectiveId) res <- backend.item.addConcEquip(id, equip) resp <- Ok(Conversions.basicResult(res, "Concerned equipment updated")) } yield resp @@ -216,7 +223,11 @@ object ItemRoutes { case req @ PUT -> Root / Ident(id) / "notes" => for { text <- req.as[OptionalText] - res <- backend.item.setNotes(id, text.text.notEmpty, user.account.collective) + res <- backend.item.setNotes( + id, + text.text.notEmpty, + user.account.collectiveId + ) resp <- Ok(Conversions.basicResult(res, "Notes updated")) } yield resp @@ -226,7 +237,7 @@ object ItemRoutes { res <- backend.item.setName( id, text.text.notEmpty.getOrElse(""), - user.account.collective + user.account.collectiveId ) resp <- Ok(Conversions.basicResult(res, "Name updated")) } yield resp @@ -238,7 +249,7 @@ object ItemRoutes { res <- backend.item.setItemDueDate( NonEmptyList.of(id), date.date, - user.account.collective + user.account.collectiveId ) resp <- Ok(Conversions.basicResult(res, "Item due date updated")) } yield resp @@ -250,14 +261,14 @@ object ItemRoutes { res <- backend.item.setItemDate( NonEmptyList.of(id), date.date, - user.account.collective + user.account.collectiveId ) resp <- Ok(Conversions.basicResult(res, "Item date updated")) } yield resp case GET -> Root / Ident(id) / "proposals" => for { - ml <- backend.item.getProposals(id, user.account.collective) + ml <- backend.item.getProposals(id, user.account.collectiveId) ip = Conversions.mkItemProposals(ml) resp <- Ok(ip) } yield resp @@ -274,7 +285,7 @@ object ItemRoutes { def notFound = NotFound(BasicResult(false, "Not found")) for { - preview <- backend.itemSearch.findItemPreview(id, user.account.collective) + preview <- backend.itemSearch.findItemPreview(id, user.account.collectiveId) inm = req.headers.get[`If-None-Match`].flatMap(_.tags) matches = BinaryUtil.matchETag(preview.map(_.meta), inm) fallback = flag.getOrElse(false) @@ -292,7 +303,7 @@ object ItemRoutes { case HEAD -> Root / Ident(id) / "preview" 
=> for { - preview <- backend.itemSearch.findItemPreview(id, user.account.collective) + preview <- backend.itemSearch.findItemPreview(id, user.account.collectiveId) resp <- preview .map(data => BinaryUtil.withResponseHeaders(dsl, Ok())(data)) @@ -303,7 +314,12 @@ object ItemRoutes { for { data <- req.as[IdList] _ <- logger.debug(s"Re-process item ${id.id}") - res <- backend.item.reprocess(id, data.ids, user.account) + res <- backend.item.reprocess( + user.account.collectiveId, + id, + data.ids, + UserTaskScope(user.account) + ) resp <- Ok(Conversions.basicResult(res, "Re-process task submitted.")) } yield resp @@ -312,7 +328,7 @@ object ItemRoutes { data <- req.as[CustomFieldValue] res <- backend.customFields.setValue( id, - SetValue(data.field, data.value, user.account.collective) + SetValue(data.field, data.value, user.account.collectiveId) ) baseUrl = ClientRequestInfo.getBaseUrl(cfg, req) _ <- backend.notification.offerEvents(res.event(user.account, baseUrl.some)) @@ -322,7 +338,7 @@ object ItemRoutes { case req @ DELETE -> Root / Ident(id) / "customfield" / Ident(fieldId) => for { res <- backend.customFields.deleteValue( - RemoveValue(fieldId, NonEmptyList.of(id), user.account.collective) + RemoveValue(fieldId, NonEmptyList.of(id), user.account.collectiveId) ) baseUrl = ClientRequestInfo.getBaseUrl(cfg, req) _ <- backend.notification.offerEvents(res.event(user.account, baseUrl.some)) @@ -333,7 +349,7 @@ object ItemRoutes { for { n <- backend.item.setDeletedState( NonEmptyList.of(id), - user.account.collective + user.account.collectiveId ) res = BasicResult( n > 0, diff --git a/modules/restserver/src/main/scala/docspell/restserver/routes/JobQueueRoutes.scala b/modules/restserver/src/main/scala/docspell/restserver/routes/JobQueueRoutes.scala index 21e0ca53..25841f25 100644 --- a/modules/restserver/src/main/scala/docspell/restserver/routes/JobQueueRoutes.scala +++ b/modules/restserver/src/main/scala/docspell/restserver/routes/JobQueueRoutes.scala @@ -8,13 +8,12 
@@ package docspell.restserver.routes import cats.effect._ import cats.implicits._ - import docspell.backend.BackendApp import docspell.backend.auth.AuthToken import docspell.common.Ident import docspell.restapi.model.JobPriority import docspell.restserver.conv.Conversions - +import docspell.scheduler.usertask.UserTaskScope import org.http4s.HttpRoutes import org.http4s.circe.CirceEntityDecoder._ import org.http4s.circe.CirceEntityEncoder._ @@ -29,21 +28,21 @@ object JobQueueRoutes { HttpRoutes.of { case GET -> Root / "state" => for { - js <- backend.job.queueState(user.account.collective, 40) + js <- backend.job.queueState(UserTaskScope(user.account), 40) res = Conversions.mkJobQueueState(js) resp <- Ok(res) } yield resp case POST -> Root / Ident(id) / "cancel" => for { - result <- backend.job.cancelJob(id, user.account.collective) + result <- backend.job.cancelJob(id, UserTaskScope(user.account)) resp <- Ok(Conversions.basicResult(result)) } yield resp case req @ POST -> Root / Ident(id) / "priority" => for { prio <- req.as[JobPriority] - res <- backend.job.setPriority(id, user.account.collective, prio.priority) + res <- backend.job.setPriority(id, UserTaskScope(user.account), prio.priority) resp <- Ok(Conversions.basicResult(res, "Job priority changed")) } yield resp } diff --git a/modules/restserver/src/main/scala/docspell/restserver/routes/LoginRoutes.scala b/modules/restserver/src/main/scala/docspell/restserver/routes/LoginRoutes.scala index fa7cab5b..464bd149 100644 --- a/modules/restserver/src/main/scala/docspell/restserver/routes/LoginRoutes.scala +++ b/modules/restserver/src/main/scala/docspell/restserver/routes/LoginRoutes.scala @@ -96,7 +96,7 @@ object LoginRoutes { resp <- Ok( AuthResult( token.account.collective.id, - token.account.user.id, + token.account.login.id, true, "Login successful", Some(cd.asString), diff --git a/modules/restserver/src/main/scala/docspell/restserver/routes/MailSendRoutes.scala 
b/modules/restserver/src/main/scala/docspell/restserver/routes/MailSendRoutes.scala index f0e44909..15ceceac 100644 --- a/modules/restserver/src/main/scala/docspell/restserver/routes/MailSendRoutes.scala +++ b/modules/restserver/src/main/scala/docspell/restserver/routes/MailSendRoutes.scala @@ -32,7 +32,9 @@ object MailSendRoutes { for { in <- req.as[SimpleMail] mail = convertIn(id, in) - res <- mail.traverse(m => backend.mail.sendMail(user.account, name, m)) + res <- mail.traverse(m => + backend.mail.sendMail(user.account.userId, user.account.collectiveId, name, m) + ) resp <- res.fold( err => Ok(BasicResult(false, s"Invalid mail data: $err")), res => Ok(convertOut(res)) diff --git a/modules/restserver/src/main/scala/docspell/restserver/routes/MailSettingsRoutes.scala b/modules/restserver/src/main/scala/docspell/restserver/routes/MailSettingsRoutes.scala index 4f0cd169..ff391d67 100644 --- a/modules/restserver/src/main/scala/docspell/restserver/routes/MailSettingsRoutes.scala +++ b/modules/restserver/src/main/scala/docspell/restserver/routes/MailSettingsRoutes.scala @@ -35,27 +35,27 @@ object MailSettingsRoutes { HttpRoutes.of { case GET -> Root / "smtp" :? QueryParam.QueryOpt(q) => for { - list <- backend.mail.getSmtpSettings(user.account, q.map(_.q)) + list <- backend.mail.getSmtpSettings(user.account.userId, q.map(_.q)) res = list.map(convert) resp <- Ok(EmailSettingsList(res.toList)) } yield resp case GET -> Root / "imap" :? 
QueryParam.QueryOpt(q) => for { - list <- backend.mail.getImapSettings(user.account, q.map(_.q)) + list <- backend.mail.getImapSettings(user.account.userId, q.map(_.q)) res = list.map(convert) resp <- Ok(ImapSettingsList(res.toList)) } yield resp case GET -> Root / "smtp" / Ident(name) => (for { - ems <- backend.mail.findSmtpSettings(user.account, name) + ems <- backend.mail.findSmtpSettings(user.account.userId, name) resp <- OptionT.liftF(Ok(convert(ems))) } yield resp).getOrElseF(NotFound()) case GET -> Root / "imap" / Ident(name) => (for { - ems <- backend.mail.findImapSettings(user.account, name) + ems <- backend.mail.findImapSettings(user.account.userId, name) resp <- OptionT.liftF(Ok(convert(ems))) } yield resp).getOrElseF(NotFound()) @@ -64,7 +64,7 @@ object MailSettingsRoutes { in <- OptionT.liftF(req.as[EmailSettings]) ru = makeSmtpSettings(in) up <- OptionT.liftF( - ru.traverse(r => backend.mail.createSmtpSettings(user.account, r)) + ru.traverse(r => backend.mail.createSmtpSettings(user.account.userId, r)) ) resp <- OptionT.liftF( Ok( @@ -81,7 +81,7 @@ object MailSettingsRoutes { in <- OptionT.liftF(req.as[ImapSettings]) ru = makeImapSettings(in) up <- OptionT.liftF( - ru.traverse(r => backend.mail.createImapSettings(user.account, r)) + ru.traverse(r => backend.mail.createImapSettings(user.account.userId, r)) ) resp <- OptionT.liftF( Ok( @@ -98,7 +98,9 @@ object MailSettingsRoutes { in <- OptionT.liftF(req.as[EmailSettings]) ru = makeSmtpSettings(in) up <- OptionT.liftF( - ru.traverse(r => backend.mail.updateSmtpSettings(user.account, name, r)) + ru.traverse(r => + backend.mail.updateSmtpSettings(user.account.userId, name, r) + ) ) resp <- OptionT.liftF( Ok( @@ -117,7 +119,9 @@ object MailSettingsRoutes { in <- OptionT.liftF(req.as[ImapSettings]) ru = makeImapSettings(in) up <- OptionT.liftF( - ru.traverse(r => backend.mail.updateImapSettings(user.account, name, r)) + ru.traverse(r => + backend.mail.updateImapSettings(user.account.userId, name, r) + ) ) 
resp <- OptionT.liftF( Ok( @@ -133,7 +137,7 @@ object MailSettingsRoutes { case DELETE -> Root / "smtp" / Ident(name) => for { - n <- backend.mail.deleteSmtpSettings(user.account, name) + n <- backend.mail.deleteSmtpSettings(user.account.userId, name) resp <- Ok( if (n > 0) BasicResult(true, "Mail settings removed") else BasicResult(false, "Mail settings could not be removed") @@ -142,7 +146,7 @@ object MailSettingsRoutes { case DELETE -> Root / "imap" / Ident(name) => for { - n <- backend.mail.deleteImapSettings(user.account, name) + n <- backend.mail.deleteImapSettings(user.account.userId, name) resp <- Ok( if (n > 0) BasicResult(true, "Mail settings removed") else BasicResult(false, "Mail settings could not be removed") diff --git a/modules/restserver/src/main/scala/docspell/restserver/routes/NotificationRoutes.scala b/modules/restserver/src/main/scala/docspell/restserver/routes/NotificationRoutes.scala index e0c9f0bd..2e5940d0 100644 --- a/modules/restserver/src/main/scala/docspell/restserver/routes/NotificationRoutes.scala +++ b/modules/restserver/src/main/scala/docspell/restserver/routes/NotificationRoutes.scala @@ -50,14 +50,14 @@ object NotificationRoutes extends NonEmptyListSupport { HttpRoutes.of { case GET -> Root => for { - list <- backend.notification.listChannels(user.account) + list <- backend.notification.listChannels(user.account.userId) data = list.map(NotificationChannel.convert) resp <- Ok(data) } yield resp case DELETE -> Root / Ident(id) => for { - res <- backend.notification.deleteChannel(id, user.account) + res <- backend.notification.deleteChannel(id, user.account.userId) resp <- Ok(Conversions.basicResult(res, "Channel deleted")) } yield resp @@ -69,7 +69,7 @@ object NotificationRoutes extends NonEmptyListSupport { .fromEither[F](ch) .semiflatMap { c => backend.notification - .createChannel(c, user.account) + .createChannel(c, user.account.userId) .map(res => Conversions.basicResult(res, "Channel created")) } .foldF(ex => 
BadRequest(BasicResult(false, ex.getMessage)), Ok(_)) @@ -83,7 +83,7 @@ object NotificationRoutes extends NonEmptyListSupport { .fromEither[F](ch) .semiflatMap { c => backend.notification - .updateChannel(c, user.account) + .updateChannel(c, user.account.userId) .map(res => Conversions.basicResult(res, "Channel created")) } .foldF(ex => BadRequest(BasicResult(false, ex.getMessage)), Ok(_)) @@ -102,14 +102,14 @@ object NotificationRoutes extends NonEmptyListSupport { HttpRoutes.of { case GET -> Root => for { - list <- backend.notification.listHooks(user.account) + list <- backend.notification.listHooks(user.account.userId) data = list.map(Converters.convertHook) resp <- Ok(data) } yield resp case DELETE -> Root / Ident(id) => for { - res <- backend.notification.deleteHook(id, user.account) + res <- backend.notification.deleteHook(id, user.account.userId) resp <- Ok(Conversions.basicResult(res, "Hook deleted.")) } yield resp @@ -117,7 +117,7 @@ object NotificationRoutes extends NonEmptyListSupport { for { input <- req.as[NotificationHook] hook <- Sync[F].pure(Converters.convertHook(input)).rethrow - res <- backend.notification.createHook(hook, user.account) + res <- backend.notification.createHook(hook, user.account.userId) resp <- Ok(Conversions.basicResult(res, "Hook created")) } yield resp @@ -125,7 +125,7 @@ object NotificationRoutes extends NonEmptyListSupport { for { input <- req.as[NotificationHook] hook <- Sync[F].pure(Converters.convertHook(input)).rethrow - res <- backend.notification.updateHook(hook, user.account) + res <- backend.notification.updateHook(hook, user.account.userId) resp <- Ok(Conversions.basicResult(res, "Hook updated")) } yield resp @@ -187,7 +187,7 @@ object NotificationRoutes extends NonEmptyListSupport { NotificationHook( h.id, h.enabled, - h.channels.map(c => NotificationChannelRef(c.id, c.channelType, c.name)).toList, + h.channels.map(c => NotificationChannelRef(c.id, c.channelType, c.name)), h.allEvents, h.eventFilter, h.events diff 
--git a/modules/restserver/src/main/scala/docspell/restserver/routes/NotifyDueItemsRoutes.scala b/modules/restserver/src/main/scala/docspell/restserver/routes/NotifyDueItemsRoutes.scala index dc180ba0..c09cd1f4 100644 --- a/modules/restserver/src/main/scala/docspell/restserver/routes/NotifyDueItemsRoutes.scala +++ b/modules/restserver/src/main/scala/docspell/restserver/routes/NotifyDueItemsRoutes.scala @@ -110,7 +110,7 @@ object NotifyDueItemsRoutes extends MailAddressCodec with NonEmptyListSupport { def makeTask[F[_]: Sync]( id: Ident, baseUrl: LenientUri, - user: AccountId, + user: AccountInfo, settings: PeriodicDueItemsSettings ): F[UserTask[PeriodicDueItemsArgs]] = requireNonEmpty(settings.channels).map { ch => diff --git a/modules/restserver/src/main/scala/docspell/restserver/routes/OrganizationRoutes.scala b/modules/restserver/src/main/scala/docspell/restserver/routes/OrganizationRoutes.scala index 25d0218b..0f8536e4 100644 --- a/modules/restserver/src/main/scala/docspell/restserver/routes/OrganizationRoutes.scala +++ b/modules/restserver/src/main/scala/docspell/restserver/routes/OrganizationRoutes.scala @@ -36,7 +36,7 @@ object OrganizationRoutes { if (full.getOrElse(false)) for { data <- backend.organization.findAllOrg( - user.account, + user.account.collectiveId, q.map(_.q), order ) @@ -44,14 +44,18 @@ object OrganizationRoutes { } yield resp else for { - data <- backend.organization.findAllOrgRefs(user.account, q.map(_.q), order) + data <- backend.organization.findAllOrgRefs( + user.account.collectiveId, + q.map(_.q), + order + ) resp <- Ok(ReferenceList(data.map(mkIdName).toList)) } yield resp case req @ POST -> Root => for { data <- req.as[Organization] - newOrg <- newOrg(data, user.account.collective) + newOrg <- newOrg(data, user.account.collectiveId) added <- backend.organization.addOrg(newOrg) resp <- Ok(basicResult(added, "New organization saved.")) } yield resp @@ -59,23 +63,22 @@ object OrganizationRoutes { case req @ PUT -> Root => for { data <- 
req.as[Organization] - upOrg <- changeOrg(data, user.account.collective) + upOrg <- changeOrg(data, user.account.collectiveId) update <- backend.organization.updateOrg(upOrg) resp <- Ok(basicResult(update, "Organization updated.")) } yield resp case DELETE -> Root / Ident(id) => for { - delOrg <- backend.organization.deleteOrg(id, user.account.collective) + delOrg <- backend.organization.deleteOrg(id, user.account.collectiveId) resp <- Ok(basicResult(delOrg, "Organization deleted.")) } yield resp case GET -> Root / Ident(id) => (for { - org <- OptionT(backend.organization.findOrg(user.account, id)) + org <- OptionT(backend.organization.findOrg(user.account.collectiveId, id)) resp <- OptionT.liftF(Ok(mkOrg(org))) } yield resp).getOrElseF(NotFound()) } } - } diff --git a/modules/restserver/src/main/scala/docspell/restserver/routes/PeriodicQueryRoutes.scala b/modules/restserver/src/main/scala/docspell/restserver/routes/PeriodicQueryRoutes.scala index 0a300130..0245216a 100644 --- a/modules/restserver/src/main/scala/docspell/restserver/routes/PeriodicQueryRoutes.scala +++ b/modules/restserver/src/main/scala/docspell/restserver/routes/PeriodicQueryRoutes.scala @@ -112,7 +112,7 @@ object PeriodicQueryRoutes extends MailAddressCodec with NonEmptyListSupport { def makeTask[F[_]: Sync]( id: Ident, baseUrl: LenientUri, - user: AccountId, + user: AccountInfo, settings: PeriodicQuerySettings ): F[UserTask[PeriodicQueryArgs]] = Sync[F] diff --git a/modules/restserver/src/main/scala/docspell/restserver/routes/PersonRoutes.scala b/modules/restserver/src/main/scala/docspell/restserver/routes/PersonRoutes.scala index 0f893571..6e033754 100644 --- a/modules/restserver/src/main/scala/docspell/restserver/routes/PersonRoutes.scala +++ b/modules/restserver/src/main/scala/docspell/restserver/routes/PersonRoutes.scala @@ -37,7 +37,7 @@ object PersonRoutes { if (full.getOrElse(false)) for { data <- backend.organization.findAllPerson( - user.account, + user.account.collectiveId, q.map(_.q), 
order ) @@ -46,7 +46,7 @@ object PersonRoutes { else for { data <- backend.organization.findAllPersonRefs( - user.account, + user.account.collectiveId, q.map(_.q), order ) @@ -56,7 +56,7 @@ object PersonRoutes { case req @ POST -> Root => for { data <- req.as[Person] - newPer <- newPerson(data, user.account.collective) + newPer <- newPerson(data, user.account.collectiveId) added <- backend.organization.addPerson(newPer) resp <- Ok(basicResult(added, "New person saved.")) } yield resp @@ -64,7 +64,7 @@ object PersonRoutes { case req @ PUT -> Root => for { data <- req.as[Person] - upPer <- changePerson(data, user.account.collective) + upPer <- changePerson(data, user.account.collectiveId) update <- backend.organization.updatePerson(upPer) resp <- Ok(basicResult(update, "Person updated.")) } yield resp @@ -72,16 +72,15 @@ object PersonRoutes { case DELETE -> Root / Ident(id) => for { _ <- logger.debug(s"Deleting person ${id.id}") - delOrg <- backend.organization.deletePerson(id, user.account.collective) + delOrg <- backend.organization.deletePerson(id, user.account.collectiveId) resp <- Ok(basicResult(delOrg, "Person deleted.")) } yield resp case GET -> Root / Ident(id) => (for { - org <- OptionT(backend.organization.findPerson(user.account, id)) + org <- OptionT(backend.organization.findPerson(user.account.collectiveId, id)) resp <- OptionT.liftF(Ok(mkPerson(org))) } yield resp).getOrElseF(NotFound()) } } - } diff --git a/modules/restserver/src/main/scala/docspell/restserver/routes/ScanMailboxRoutes.scala b/modules/restserver/src/main/scala/docspell/restserver/routes/ScanMailboxRoutes.scala index 61f27c11..563eb795 100644 --- a/modules/restserver/src/main/scala/docspell/restserver/routes/ScanMailboxRoutes.scala +++ b/modules/restserver/src/main/scala/docspell/restserver/routes/ScanMailboxRoutes.scala @@ -35,7 +35,7 @@ object ScanMailboxRoutes { HttpRoutes.of { case GET -> Root => ut.getScanMailbox(UserTaskScope(user.account)) - .evalMap(task => 
taskToSettings(user.account, backend, task)) + .evalMap(task => taskToSettings(user.account.userId, backend, task)) .compile .toVector .map(v => ScanMailboxSettingsList(v.toList)) @@ -44,7 +44,7 @@ object ScanMailboxRoutes { case GET -> Root / Ident(id) => (for { task <- ut.findScanMailbox(id, UserTaskScope(user.account)) - res <- OptionT.liftF(taskToSettings(user.account, backend, task)) + res <- OptionT.liftF(taskToSettings(user.account.userId, backend, task)) resp <- OptionT.liftF(Ok(res)) } yield resp).getOrElseF(NotFound()) @@ -102,7 +102,7 @@ object ScanMailboxRoutes { def makeTask[F[_]: Sync]( id: Ident, - user: AccountId, + user: AccountInfo, settings: ScanMailboxSettings ): F[UserTask[ScanMailboxArgs]] = Sync[F].pure( @@ -133,14 +133,14 @@ object ScanMailboxRoutes { ) def taskToSettings[F[_]: Sync]( - account: AccountId, + userId: Ident, backend: BackendApp[F], task: UserTask[ScanMailboxArgs] ): F[ScanMailboxSettings] = for { conn <- backend.mail - .getImapSettings(account, None) + .getImapSettings(userId, None) .map( _.find(_.name == task.args.imapConnection) .map(_.name) diff --git a/modules/restserver/src/main/scala/docspell/restserver/routes/SentMailRoutes.scala b/modules/restserver/src/main/scala/docspell/restserver/routes/SentMailRoutes.scala index 16045709..9b016e28 100644 --- a/modules/restserver/src/main/scala/docspell/restserver/routes/SentMailRoutes.scala +++ b/modules/restserver/src/main/scala/docspell/restserver/routes/SentMailRoutes.scala @@ -30,19 +30,19 @@ object SentMailRoutes { HttpRoutes.of { case GET -> Root / "item" / Ident(id) => for { - all <- backend.mail.getSentMailsForItem(user.account, id) + all <- backend.mail.getSentMailsForItem(user.account.collectiveId, id) resp <- Ok(SentMails(all.map(convert).toList)) } yield resp case GET -> Root / "mail" / Ident(mailId) => (for { - mail <- backend.mail.getSentMail(user.account, mailId) + mail <- backend.mail.getSentMail(user.account.collectiveId, mailId) resp <- 
OptionT.liftF(Ok(convert(mail))) } yield resp).getOrElseF(NotFound()) case DELETE -> Root / "mail" / Ident(mailId) => for { - n <- backend.mail.deleteSentMail(user.account, mailId) + n <- backend.mail.deleteSentMail(user.account.collectiveId, mailId) resp <- Ok(BasicResult(n > 0, s"Mails deleted: $n")) } yield resp } diff --git a/modules/restserver/src/main/scala/docspell/restserver/routes/ShareRoutes.scala b/modules/restserver/src/main/scala/docspell/restserver/routes/ShareRoutes.scala index 2b16263b..a8fd11da 100644 --- a/modules/restserver/src/main/scala/docspell/restserver/routes/ShareRoutes.scala +++ b/modules/restserver/src/main/scala/docspell/restserver/routes/ShareRoutes.scala @@ -35,9 +35,9 @@ object ShareRoutes { HttpRoutes.of { case GET -> Root :? QP.Query(q) :? QP.OwningFlag(owning) => - val login = if (owning) Some(user.account.user) else None + val login = if (owning) Some(user.account.login) else None for { - all <- backend.share.findAll(user.account.collective, login, q) + all <- backend.share.findAll(user.account.collectiveId, login, q) now <- Timestamp.current[F] res <- Ok(ShareList(all.map(mkShareDetail(now)))) } yield res @@ -52,7 +52,7 @@ object ShareRoutes { case GET -> Root / Ident(id) => (for { - share <- backend.share.findOne(id, user.account.collective) + share <- backend.share.findOne(id, user.account.collectiveId) now <- OptionT.liftF(Timestamp.current[F]) resp <- OptionT.liftF(Ok(mkShareDetail(now)(share))) } yield resp).getOrElseF(NotFound()) @@ -67,7 +67,7 @@ object ShareRoutes { case DELETE -> Root / Ident(id) => for { - del <- backend.share.delete(id, user.account.collective) + del <- backend.share.delete(id, user.account.collectiveId) resp <- Ok(BasicResult(del, if (del) "Share deleted." 
else "Deleting failed.")) } yield resp @@ -75,7 +75,10 @@ object ShareRoutes { for { in <- req.as[SimpleShareMail] mail = convertIn(in) - res <- mail.traverse(m => backend.share.sendMail(user.account, name, m)) + res <- mail.traverse(m => + backend.share + .sendMail(user.account.collectiveId, user.account.userId, name, m) + ) resp <- res.fold( err => Ok(BasicResult(false, s"Invalid mail data: $err")), res => Ok(convertOut(res)) @@ -111,7 +114,7 @@ object ShareRoutes { def mkNewShare(data: ShareData, user: AuthToken): OShare.NewShare = OShare.NewShare( - user.account, + user.account.asAccountId, data.name, data.query, data.enabled, @@ -159,7 +162,7 @@ object ShareRoutes { ShareDetail( r.share.id, r.share.query, - IdName(r.user.uid, r.user.login.id), + IdName(r.account.userId, r.account.login.id), r.share.name, r.share.enabled, r.share.publishAt, diff --git a/modules/restserver/src/main/scala/docspell/restserver/routes/SourceRoutes.scala b/modules/restserver/src/main/scala/docspell/restserver/routes/SourceRoutes.scala index f67fd772..9e1a621a 100644 --- a/modules/restserver/src/main/scala/docspell/restserver/routes/SourceRoutes.scala +++ b/modules/restserver/src/main/scala/docspell/restserver/routes/SourceRoutes.scala @@ -30,14 +30,14 @@ object SourceRoutes { HttpRoutes.of { case GET -> Root => for { - all <- backend.source.findAll(user.account) + all <- backend.source.findAll(user.account.collectiveId) res <- Ok(SourceList(all.map(mkSource).toList)) } yield res case req @ POST -> Root => for { data <- req.as[SourceTagIn] - src <- newSource(data.source, user.account.collective) + src <- newSource(data.source, user.account.collectiveId) added <- backend.source.add(src, data.tags) resp <- Ok(basicResult(added, "Source added.")) } yield resp @@ -45,14 +45,14 @@ object SourceRoutes { case req @ PUT -> Root => for { data <- req.as[SourceTagIn] - src = changeSource(data.source, user.account.collective) + src = changeSource(data.source, user.account.collectiveId) updated <- 
backend.source.update(src, data.tags) resp <- Ok(basicResult(updated, "Source updated.")) } yield resp case DELETE -> Root / Ident(id) => for { - del <- backend.source.delete(id, user.account.collective) + del <- backend.source.delete(id, user.account.collectiveId) resp <- Ok(basicResult(del, "Source deleted.")) } yield resp } diff --git a/modules/restserver/src/main/scala/docspell/restserver/routes/TagRoutes.scala b/modules/restserver/src/main/scala/docspell/restserver/routes/TagRoutes.scala index e503b034..e42356c7 100644 --- a/modules/restserver/src/main/scala/docspell/restserver/routes/TagRoutes.scala +++ b/modules/restserver/src/main/scala/docspell/restserver/routes/TagRoutes.scala @@ -32,7 +32,7 @@ object TagRoutes { case GET -> Root :? QueryParam.QueryOpt(q) :? QueryParam.TagSort(sort) => for { all <- backend.tag.findAll( - user.account, + user.account.collectiveId, q.map(_.q), sort.getOrElse(TagOrder.NameAsc) ) @@ -42,7 +42,7 @@ object TagRoutes { case req @ POST -> Root => for { data <- req.as[Tag] - tag <- newTag(data, user.account.collective) + tag <- newTag(data, user.account.collectiveId) res <- backend.tag.add(tag) resp <- Ok(basicResult(res, "Tag successfully created.")) } yield resp @@ -50,14 +50,14 @@ object TagRoutes { case req @ PUT -> Root => for { data <- req.as[Tag] - tag = changeTag(data, user.account.collective) + tag = changeTag(data, user.account.collectiveId) res <- backend.tag.update(tag) resp <- Ok(basicResult(res, "Tag successfully updated.")) } yield resp case DELETE -> Root / Ident(id) => for { - del <- backend.tag.delete(id, user.account.collective) + del <- backend.tag.delete(id, user.account.collectiveId) resp <- Ok(basicResult(del, "Tag successfully deleted.")) } yield resp } diff --git a/modules/restserver/src/main/scala/docspell/restserver/routes/UploadRoutes.scala b/modules/restserver/src/main/scala/docspell/restserver/routes/UploadRoutes.scala index 3fe1c379..20bd3484 100644 --- 
a/modules/restserver/src/main/scala/docspell/restserver/routes/UploadRoutes.scala +++ b/modules/restserver/src/main/scala/docspell/restserver/routes/UploadRoutes.scala @@ -33,7 +33,13 @@ object UploadRoutes { val dsl = new Http4sDsl[F] with ResponseGenerator[F] {} import dsl._ - val submitting = submitFiles[F](backend, cfg, Right(user.account)) _ + val submitting = + submitFiles[F]( + backend, + cfg, + Right(user.account.collectiveId), + user.account.userId.some + ) _ HttpRoutes.of { case req @ POST -> Root / "item" => @@ -53,7 +59,7 @@ object UploadRoutes { (for { _ <- OptionT(backend.collective.findEnabledSource(srcId)) res <- OptionT.liftF( - submitFiles(backend, cfg, Left(srcId))(req, None, Priority.Low, dsl) + submitFiles(backend, cfg, Left(srcId), None)(req, None, Priority.Low, dsl) ) } yield res).getOrElseF(NotFound()) @@ -61,7 +67,12 @@ object UploadRoutes { (for { _ <- OptionT(backend.collective.findEnabledSource(srcId)) res <- OptionT.liftF( - submitFiles(backend, cfg, Left(srcId))(req, Some(itemId), Priority.Low, dsl) + submitFiles(backend, cfg, Left(srcId), None)( + req, + Some(itemId), + Priority.Low, + dsl + ) ) } yield res).getOrElseF(NotFound()) } @@ -70,7 +81,8 @@ object UploadRoutes { private def submitFiles[F[_]: Async]( backend: BackendApp[F], cfg: Config, - accOrSrc: Either[Ident, AccountId] + accOrSrc: Either[Ident, CollectiveId], + userId: Option[Ident] )( req: Request[F], itemId: Option[Ident], @@ -96,7 +108,7 @@ object UploadRoutes { prio, cfg.backend.files.validMimeTypes ) - result <- backend.upload.submitEither(updata, accOrSrc, itemId) + result <- backend.upload.submitEither(updata, accOrSrc, userId, itemId) res <- Ok(basicResult(result)) } yield res } diff --git a/modules/restserver/src/main/scala/docspell/restserver/routes/UserRoutes.scala b/modules/restserver/src/main/scala/docspell/restserver/routes/UserRoutes.scala index 591c124b..784a423a 100644 --- a/modules/restserver/src/main/scala/docspell/restserver/routes/UserRoutes.scala +++ 
b/modules/restserver/src/main/scala/docspell/restserver/routes/UserRoutes.scala @@ -8,14 +8,13 @@ package docspell.restserver.routes import cats.effect._ import cats.implicits._ - import docspell.backend.BackendApp import docspell.backend.auth.AuthToken import docspell.backend.ops.OCollective import docspell.common._ import docspell.restapi.model._ import docspell.restserver.conv.Conversions._ - +import docspell.store.UpdateResult import org.http4s.HttpRoutes import org.http4s.circe.CirceEntityDecoder._ import org.http4s.circe.CirceEntityEncoder._ @@ -32,7 +31,8 @@ object UserRoutes { for { data <- req.as[PasswordChange] res <- backend.collective.changePassword( - user.account, + user.account.collectiveId, + user.account.userId, data.currentPassword, data.newPassword ) @@ -41,14 +41,14 @@ object UserRoutes { case GET -> Root => for { - all <- backend.collective.listUser(user.account.collective) + all <- backend.collective.listUser(user.account.collectiveId) res <- Ok(UserList(all.map(mkUser).toList)) } yield res case req @ POST -> Root => for { data <- req.as[User] - nuser <- newUser(data, user.account.collective) + nuser <- newUser(data, user.account.collectiveId) added <- backend.collective.add(nuser) resp <- Ok(basicResult(added, "User created.")) } yield resp @@ -56,25 +56,35 @@ object UserRoutes { case req @ PUT -> Root => for { data <- req.as[User] - nuser = changeUser(data, user.account.collective) + nuser = changeUser(data, user.account.collectiveId) update <- backend.collective.update(nuser) resp <- Ok(basicResult(update, "User updated.")) } yield resp case DELETE -> Root / Ident(id) => for { - ar <- backend.collective.deleteUser(id, user.account.collective) + users <- backend.collective.listUser(user.account.collectiveId) + ar <- + if (users.exists(_.uid == id)) backend.collective.deleteUser(id) + else UpdateResult.notFound.pure[F] resp <- Ok(basicResult(ar, "User deleted.")) } yield resp case GET -> Root / Ident(username) / "deleteData" => for { - data 
<- backend.collective.getDeleteUserData( - AccountId(user.account.collective, username) - ) - resp <- Ok( - DeleteUserData(data.ownedFolders, data.sentMails, data.shares) - ) + users <- backend.collective.listUser(user.account.collectiveId) + userToDelete = users.find(u => u.login == username || u.uid == username) + resp <- userToDelete match { + case Some(user) => + backend.collective + .getDeleteUserData(user.cid, user.uid) + .flatMap(data => + Ok(DeleteUserData(data.ownedFolders, data.sentMails, data.shares)) + ) + + case None => + NotFound(BasicResult(false, s"User '${username.id}' not found")) + } } yield resp } } diff --git a/modules/restserver/src/main/scala/docspell/restserver/ws/WebSocketRoutes.scala b/modules/restserver/src/main/scala/docspell/restserver/ws/WebSocketRoutes.scala index febbc0e1..42ba54b2 100644 --- a/modules/restserver/src/main/scala/docspell/restserver/ws/WebSocketRoutes.scala +++ b/modules/restserver/src/main/scala/docspell/restserver/ws/WebSocketRoutes.scala @@ -10,10 +10,9 @@ import cats.effect.Async import cats.implicits._ import fs2.concurrent.Topic import fs2.{Pipe, Stream} - import docspell.backend.BackendApp import docspell.backend.auth.AuthToken - +import docspell.scheduler.usertask.UserTaskScope import org.http4s.HttpRoutes import org.http4s.dsl.Http4sDsl import org.http4s.server.websocket.WebSocketBuilder2 @@ -34,7 +33,7 @@ object WebSocketRoutes { HttpRoutes.of { case GET -> Root => val init = for { - jc <- backend.job.getUnfinishedJobCount(user.account.collective) + jc <- backend.job.getUnfinishedJobCount(UserTaskScope(user.account)) msg = OutputEvent.JobsWaiting(user.account.collective, jc) } yield Text(msg.encode) diff --git a/modules/store/src/main/scala/db/migration/MigrationTasks.scala b/modules/store/src/main/scala/db/migration/MigrationTasks.scala index ec237f55..7a803e3c 100644 --- a/modules/store/src/main/scala/db/migration/MigrationTasks.scala +++ b/modules/store/src/main/scala/db/migration/MigrationTasks.scala @@ 
-16,7 +16,11 @@ import docspell.notification.api._ import docspell.store.queries.QLogin import docspell.store.records._ -import db.migration.data._ +import db.migration.data.{ + PeriodicDueItemsArgs => PeriodicDueItemsArgsLegacy, + PeriodicQueryArgs => PeriodicQueryArgsLegacy, + _ +} import doobie._ import doobie.implicits._ import doobie.util.transactor.Strategy @@ -75,8 +79,8 @@ trait MigrationTasks { ref.flatMap(channelRef => RPeriodicTask.updateTask( old.id, - PeriodicQueryArgs.taskName, - PeriodicQueryArgs( + PeriodicQueryArgsLegacy.taskName, + PeriodicQueryArgsLegacy( oldArgs.account, NonEmptyList.of(channelRef), oldArgs.query, @@ -105,8 +109,8 @@ trait MigrationTasks { ref.flatMap(channelRef => RPeriodicTask.updateTask( old.id, - PeriodicDueItemsArgs.taskName, - PeriodicDueItemsArgs( + PeriodicDueItemsArgsLegacy.taskName, + PeriodicDueItemsArgsLegacy( oldArgs.account, NonEmptyList.of(channelRef), oldArgs.remindDays, @@ -147,7 +151,7 @@ trait MigrationTasks { RPeriodicTask .updateTask( old.id, - PeriodicDueItemsArgs.taskName, + PeriodicDueItemsArgsLegacy.taskName, a.asJson.noSpaces ) ) @@ -163,7 +167,7 @@ trait MigrationTasks { private def convertArgs( old: NotifyDueItemsArgs - ): OptionT[ConnectionIO, PeriodicDueItemsArgs] = { + ): OptionT[ConnectionIO, PeriodicDueItemsArgsLegacy] = { val recs = old.recipients .map(MailAddress.parse) .flatMap { @@ -188,7 +192,7 @@ trait MigrationTasks { now ) _ <- OptionT.liftF(RNotificationChannelMail.insert(ch)) - args = PeriodicDueItemsArgs( + args = PeriodicDueItemsArgsLegacy( old.account, NonEmptyList.of(ChannelRef(ch.id, ChannelType.Mail, chName)), old.remindDays, diff --git a/modules/store/src/main/scala/db/migration/data/PeriodicDueItemsArgs.scala b/modules/store/src/main/scala/db/migration/data/PeriodicDueItemsArgs.scala new file mode 100644 index 00000000..477924d6 --- /dev/null +++ b/modules/store/src/main/scala/db/migration/data/PeriodicDueItemsArgs.scala @@ -0,0 +1,43 @@ +/* + * Copyright 2020 Eike K. 
& Contributors + * + * SPDX-License-Identifier: AGPL-3.0-or-later + */ + +package db.migration.data + +import cats.data.NonEmptyList +import docspell.common._ +import docspell.notification.api.ChannelRef +import io.circe.generic.semiauto +import io.circe.{Decoder, Encoder} + +/** Arguments to the notification task. + * + * This tasks queries items with a due date and informs the user via mail. + * + * If the structure changes, there must be some database migration to update or remove + * the json data of the corresponding task. + * + * @deprecated + * replaced with a version using `AccountInfo` + */ +final case class PeriodicDueItemsArgs( + account: AccountId, + channels: NonEmptyList[ChannelRef], + remindDays: Int, + daysBack: Option[Int], + tagsInclude: List[Ident], + tagsExclude: List[Ident], + baseUrl: Option[LenientUri] +) + +object PeriodicDueItemsArgs { + val taskName = Ident.unsafe("periodic-due-items-notify2") + + implicit val jsonDecoder: Decoder[PeriodicDueItemsArgs] = + semiauto.deriveDecoder + + implicit val jsonEncoder: Encoder[PeriodicDueItemsArgs] = + semiauto.deriveEncoder +} diff --git a/modules/store/src/main/scala/db/migration/data/PeriodicQueryArgs.scala b/modules/store/src/main/scala/db/migration/data/PeriodicQueryArgs.scala new file mode 100644 index 00000000..5ff6d94e --- /dev/null +++ b/modules/store/src/main/scala/db/migration/data/PeriodicQueryArgs.scala @@ -0,0 +1,33 @@ +/* + * Copyright 2020 Eike K. 
& Contributors + * + * SPDX-License-Identifier: AGPL-3.0-or-later + */ + +package db.migration.data + +import cats.data.NonEmptyList +import docspell.common._ +import docspell.notification.api.ChannelRef +import io.circe.generic.semiauto +import io.circe.{Decoder, Encoder} + +/** @deprecated replaced with a version using `AccountInfo` */ +final case class PeriodicQueryArgs( + account: AccountId, + channels: NonEmptyList[ChannelRef], + query: Option[ItemQueryString], + bookmark: Option[String], + baseUrl: Option[LenientUri], + contentStart: Option[String] +) + +object PeriodicQueryArgs { + val taskName = Ident.unsafe("periodic-query-notify2") + + implicit val jsonDecoder: Decoder[PeriodicQueryArgs] = + semiauto.deriveDecoder + + implicit def jsonEncoder: Encoder[PeriodicQueryArgs] = + semiauto.deriveEncoder +} diff --git a/modules/store/src/main/scala/db/migration/data/ScanMailboxArgs.scala b/modules/store/src/main/scala/db/migration/data/ScanMailboxArgs.scala new file mode 100644 index 00000000..64334523 --- /dev/null +++ b/modules/store/src/main/scala/db/migration/data/ScanMailboxArgs.scala @@ -0,0 +1,69 @@ +/* + * Copyright 2020 Eike K. & Contributors + * + * SPDX-License-Identifier: AGPL-3.0-or-later + */ + +package db.migration.data + +import docspell.common._ +import docspell.common.syntax.all._ + +import io.circe._ +import io.circe.generic.semiauto._ + +/** Arguments to the poll-mailbox task. + * + * This tasks queries user mailboxes and pushes found mails into docspell for processing. + * + * If the structure changes, there must be some database migration to update or remove + * the json data of the corresponding task. 
+ * + * @deprecated + * replaced with a version using `AccountInfo` + */ +case class ScanMailboxArgs( + // the docspell user account + account: AccountId, + // the configured imap connection + imapConnection: Ident, + // scan folders recursively + scanRecursively: Option[Boolean], + // what folders to search + folders: List[String], + // only select mails received since then + receivedSince: Option[Duration], + // move submitted mails to another folder + targetFolder: Option[String], + // delete the after submitting (only if targetFolder is None) + deleteMail: Boolean, + // set the direction when submitting + direction: Option[Direction], + // set a folder for items + itemFolder: Option[Ident], + // set a filter for files when importing archives + fileFilter: Option[Glob], + // set a list of tags to apply to new item + tags: Option[List[String]], + // a glob filter for the mail subject + subjectFilter: Option[Glob], + // the language for extraction and analysis + language: Option[Language], + // apply additional filter to all mails or only imported + postHandleAll: Option[Boolean], + // Exclude the mail body when importing + attachmentsOnly: Option[Boolean] +) + +object ScanMailboxArgs { + + val taskName = Ident.unsafe("scan-mailbox") + + implicit val jsonEncoder: Encoder[ScanMailboxArgs] = + deriveEncoder[ScanMailboxArgs] + implicit val jsonDecoder: Decoder[ScanMailboxArgs] = + deriveDecoder[ScanMailboxArgs] + + def parse(str: String): Either[Throwable, ScanMailboxArgs] = + str.parseJsonAs[ScanMailboxArgs] +} diff --git a/modules/store/src/main/scala/docspell/store/records/RFolder.scala b/modules/store/src/main/scala/docspell/store/records/RFolder.scala index b4ef19b3..37f9c856 100644 --- a/modules/store/src/main/scala/docspell/store/records/RFolder.scala +++ b/modules/store/src/main/scala/docspell/store/records/RFolder.scala @@ -30,12 +30,12 @@ object RFolder { def newFolder[F[_]: Sync]( name: String, collective: CollectiveId, - user: Ident + ownerUserId: Ident 
): F[RFolder] = for { nId <- Ident.randomId[F] now <- Timestamp.current[F] - } yield RFolder(nId, name, collective, user, now) + } yield RFolder(nId, name, collective, ownerUserId, now) final case class Table(alias: Option[String]) extends TableDef { val tableName = "folder" From 816cca7ea2f6fa443149893f9e13a0e3ff418759 Mon Sep 17 00:00:00 2001 From: eikek Date: Thu, 4 Aug 2022 13:42:33 +0200 Subject: [PATCH 06/15] Adopt joex to new collective-id --- .../docspell/backend/ops/ODownloadAll.scala | 2 +- .../backend/task/DownloadZipArgs.scala | 3 +- .../scala/docspell/joex/JoexAppImpl.scala | 2 +- .../joex/addon/GenericItemAddonTask.scala | 4 +- .../docspell/joex/analysis/NerFile.scala | 12 ++-- .../docspell/joex/analysis/RegexNerFile.scala | 22 +++---- .../joex/download/DownloadZipTask.scala | 10 ++-- .../joex/emptytrash/EmptyTrashTask.scala | 2 +- .../scala/docspell/joex/fts/FtsWork.scala | 6 +- .../docspell/joex/fts/MigrationTask.scala | 5 +- .../scala/docspell/joex/fts/ReIndexTask.scala | 2 +- .../docspell/joex/learn/ClassifierName.scala | 12 ++-- .../scala/docspell/joex/learn/Classify.scala | 2 +- .../joex/learn/LearnClassifierTask.scala | 10 ++-- .../joex/learn/LearnItemEntities.scala | 12 ++-- .../scala/docspell/joex/learn/LearnTags.scala | 4 +- .../docspell/joex/learn/SelectItems.scala | 15 +++-- .../joex/learn/StoreClassifierModel.scala | 2 +- .../multiupload/MultiUploadArchiveTask.scala | 4 +- .../joex/notify/PeriodicDueItemsTask.scala | 7 ++- .../joex/notify/PeriodicQueryTask.scala | 16 ++++- .../docspell/joex/notify/TaskOperations.scala | 8 +-- .../joex/pagecount/AllPageCountTask.scala | 9 ++- .../joex/pdfconv/ConvertAllPdfTask.scala | 8 ++- .../joex/preview/AllPreviewsTask.scala | 18 +++--- .../joex/process/AttachmentPreview.scala | 2 +- .../docspell/joex/process/TextAnalysis.scala | 2 +- .../joex/process/TextExtraction.scala | 4 +- .../joex/scanmailbox/ScanMailboxTask.scala | 23 +++++--- .../joex/updatecheck/UpdateCheckTask.scala | 8 +-- 
.../impl/context/TagsChangedCtxTest.scala | 2 +- .../db/migration/data/DownloadZipArgs.scala | 58 +++++++++++++++++++ .../store/records/RNotificationChannel.scala | 2 +- .../docspell/store/records/RUserEmail.scala | 7 --- .../docspell/store/fts/TempFtsOpsTest.scala | 10 ++-- 35 files changed, 194 insertions(+), 121 deletions(-) create mode 100644 modules/store/src/main/scala/db/migration/data/DownloadZipArgs.scala diff --git a/modules/backend/src/main/scala/docspell/backend/ops/ODownloadAll.scala b/modules/backend/src/main/scala/docspell/backend/ops/ODownloadAll.scala index 99d4a9c0..a99c170c 100644 --- a/modules/backend/src/main/scala/docspell/backend/ops/ODownloadAll.scala +++ b/modules/backend/src/main/scala/docspell/backend/ops/ODownloadAll.scala @@ -86,7 +86,7 @@ object ODownloadAll { ): F[DownloadSummary] = for { _ <- logger.info(s"Download all request: $req") summary <- getSummary(account, req) - args = DownloadZipArgs(account.asAccountId, req) + args = DownloadZipArgs(account, req) _ <- OptionT .whenF(summary.state == DownloadState.NotPresent) { JobFactory diff --git a/modules/backend/src/main/scala/docspell/backend/task/DownloadZipArgs.scala b/modules/backend/src/main/scala/docspell/backend/task/DownloadZipArgs.scala index f8c12c70..6e689d57 100644 --- a/modules/backend/src/main/scala/docspell/backend/task/DownloadZipArgs.scala +++ b/modules/backend/src/main/scala/docspell/backend/task/DownloadZipArgs.scala @@ -8,11 +8,10 @@ package docspell.backend.task import docspell.backend.ops.ODownloadAll.model.DownloadRequest import docspell.common._ - import io.circe.generic.semiauto.{deriveDecoder, deriveEncoder} import io.circe.{Decoder, Encoder} -final case class DownloadZipArgs(accountId: AccountId, req: DownloadRequest) +final case class DownloadZipArgs(account: AccountInfo, req: DownloadRequest) object DownloadZipArgs { val taskName: Ident = Ident.unsafe("download-query-zip") diff --git a/modules/joex/src/main/scala/docspell/joex/JoexAppImpl.scala 
b/modules/joex/src/main/scala/docspell/joex/JoexAppImpl.scala index 2a6669ee..830a6aba 100644 --- a/modules/joex/src/main/scala/docspell/joex/JoexAppImpl.scala +++ b/modules/joex/src/main/scala/docspell/joex/JoexAppImpl.scala @@ -84,7 +84,7 @@ final class JoexAppImpl[F[_]: Async]( .evalMap { es => val args = EmptyTrashArgs(es.cid, es.minAge) uts.updateOneTask( - UserTaskScope(args.collective), + UserTaskScope.collective(args.collective), args.makeSubject.some, EmptyTrashTask.userTask(args, es.schedule) ) diff --git a/modules/joex/src/main/scala/docspell/joex/addon/GenericItemAddonTask.scala b/modules/joex/src/main/scala/docspell/joex/addon/GenericItemAddonTask.scala index 68b2d775..37cc0ef7 100644 --- a/modules/joex/src/main/scala/docspell/joex/addon/GenericItemAddonTask.scala +++ b/modules/joex/src/main/scala/docspell/joex/addon/GenericItemAddonTask.scala @@ -49,7 +49,7 @@ object GenericItemAddonTask extends LoggerExtension { trigger: AddonTriggerType, addonTaskIds: Set[Ident] )( - collective: Ident, + collective: CollectiveId, data: ItemData, maybeMeta: Option[ProcessItemArgs.ProcessMeta] ): Task[F, Unit, ItemData] = @@ -63,7 +63,7 @@ object GenericItemAddonTask extends LoggerExtension { trigger: AddonTriggerType, addonTaskIds: Set[Ident] )( - collective: Ident, + collective: CollectiveId, data: ItemData, maybeMeta: Option[ProcessItemArgs.ProcessMeta] ): Task[F, Unit, ExecResult] = diff --git a/modules/joex/src/main/scala/docspell/joex/analysis/NerFile.scala b/modules/joex/src/main/scala/docspell/joex/analysis/NerFile.scala index ea0500c6..52f51ea9 100644 --- a/modules/joex/src/main/scala/docspell/joex/analysis/NerFile.scala +++ b/modules/joex/src/main/scala/docspell/joex/analysis/NerFile.scala @@ -18,7 +18,7 @@ import docspell.store.queries.QCollective import io.circe.generic.semiauto._ import io.circe.{Decoder, Encoder} -case class NerFile(collective: Ident, updated: Timestamp, creation: Timestamp) { +case class NerFile(collective: CollectiveId, updated: 
Timestamp, creation: Timestamp) { def nerFilePath(directory: Path): Path = NerFile.nerFilePath(directory, collective) @@ -33,14 +33,14 @@ object NerFile { implicit val jsonEncoder: Encoder[NerFile] = deriveEncoder[NerFile] - private def nerFilePath(directory: Path, collective: Ident): Path = - directory.resolve(s"${collective.id}.txt") + private def nerFilePath(directory: Path, collective: CollectiveId): Path = + directory.resolve(s"${collective.value}.txt") - private def jsonFilePath(directory: Path, collective: Ident): Path = - directory.resolve(s"${collective.id}.json") + private def jsonFilePath(directory: Path, collective: CollectiveId): Path = + directory.resolve(s"${collective.value}.json") def find[F[_]: Async]( - collective: Ident, + collective: CollectiveId, directory: Path ): F[Option[NerFile]] = { val file = jsonFilePath(directory, collective) diff --git a/modules/joex/src/main/scala/docspell/joex/analysis/RegexNerFile.scala b/modules/joex/src/main/scala/docspell/joex/analysis/RegexNerFile.scala index 8d3f3562..8801bff3 100644 --- a/modules/joex/src/main/scala/docspell/joex/analysis/RegexNerFile.scala +++ b/modules/joex/src/main/scala/docspell/joex/analysis/RegexNerFile.scala @@ -24,7 +24,7 @@ import io.circe.syntax._ /** Maintains a custom regex-ner file per collective for stanford's regexner annotator. 
*/ trait RegexNerFile[F[_]] { - def makeFile(collective: Ident): F[Option[Path]] + def makeFile(collective: CollectiveId): F[Option[Path]] } @@ -49,11 +49,11 @@ object RegexNerFile { private[this] val logger = docspell.logging.getLogger[F] - def makeFile(collective: Ident): F[Option[Path]] = + def makeFile(collective: CollectiveId): F[Option[Path]] = if (cfg.maxEntries > 0) doMakeFile(collective) else (None: Option[Path]).pure[F] - def doMakeFile(collective: Ident): F[Option[Path]] = + def doMakeFile(collective: CollectiveId): F[Option[Path]] = for { now <- Timestamp.current[F] existing <- NerFile.find[F](collective, cfg.directory) @@ -75,7 +75,7 @@ object RegexNerFile { } yield result private def updateFile( - collective: Ident, + collective: CollectiveId, now: Timestamp, current: Option[NerFile] ): F[Option[Path]] = @@ -95,7 +95,7 @@ object RegexNerFile { ) *> cur.pure[F] else logger.debug( - s"There have been state changes for collective '${collective.id}'. Reload NER file." + s"There have been state changes for collective '${collective.value}'. Reload NER file." 
) *> createFile(lup, collective, now) nerf.map(_.nerFilePath(cfg.directory).some) case None => @@ -119,7 +119,7 @@ object RegexNerFile { private def createFile( lastUpdate: Timestamp, - collective: Ident, + collective: CollectiveId, now: Timestamp ): F[NerFile] = { def update(nf: NerFile, text: String): F[Unit] = @@ -127,7 +127,7 @@ object RegexNerFile { for { jsonFile <- Sync[F].pure(nf.jsonFilePath(cfg.directory)) _ <- logger.debug( - s"Writing custom NER file for collective '${collective.id}'" + s"Writing custom NER file for collective '${collective.value}'" ) _ <- jsonFile.parent match { case Some(p) => File.mkDir(p) @@ -139,7 +139,9 @@ object RegexNerFile { ) for { - _ <- logger.info(s"Generating custom NER file for collective '${collective.id}'") + _ <- logger.info( + s"Generating custom NER file for collective '${collective.value}'" + ) names <- store.transact(QCollective.allNames(collective, cfg.maxEntries)) nerFile = NerFile(collective, lastUpdate, now) _ <- update(nerFile, NerFile.mkNerConfig(names)) @@ -152,8 +154,8 @@ object RegexNerFile { import docspell.store.qb.DSL._ import docspell.store.qb._ - def latestUpdate(collective: Ident): ConnectionIO[Option[Timestamp]] = { - def max_(col: Column[_], cidCol: Column[Ident]): Select = + def latestUpdate(collective: CollectiveId): ConnectionIO[Option[Timestamp]] = { + def max_(col: Column[_], cidCol: Column[CollectiveId]): Select = Select(max(col).as("t"), from(col.table), cidCol === collective) val sql = union( diff --git a/modules/joex/src/main/scala/docspell/joex/download/DownloadZipTask.scala b/modules/joex/src/main/scala/docspell/joex/download/DownloadZipTask.scala index 00d747d9..92856b9b 100644 --- a/modules/joex/src/main/scala/docspell/joex/download/DownloadZipTask.scala +++ b/modules/joex/src/main/scala/docspell/joex/download/DownloadZipTask.scala @@ -35,7 +35,7 @@ object DownloadZipTask { ): Task[F, Args, Result] = Task { ctx => val req = ctx.args.req - val query = req.toQuery(ctx.args.accountId) + 
val query = req.toQuery(ctx.args.account) val allFiles = Stream @@ -53,7 +53,7 @@ object DownloadZipTask { .through(Zip[F](ctx.logger.some).zip(chunkSize)) .through( store.fileRepo.save( - ctx.args.accountId.collective, + ctx.args.account.collectiveId, FileCategory.DownloadAll, MimeTypeHint.advertised("application/zip") ) @@ -61,10 +61,10 @@ object DownloadZipTask { for { _ <- ctx.logger.info(s"Start zipping ${req.itemQueryString}") - summary <- downloadOps.getSummary(ctx.args.accountId, req) + summary <- downloadOps.getSummary(ctx.args.account, req) _ <- ctx.logger.debug(s"Summary: $summary") file <- storeZipFile.compile.lastOrError - row <- createRow(summary, ctx.args.accountId.collective, file) + row <- createRow(summary, ctx.args.account.collectiveId, file) _ <- ctx.logger.debug(s"Inserting zip file: $row") _ <- store.transact(RDownloadQuery.insert(row)) } yield Result(summary.fileCount) @@ -92,7 +92,7 @@ object DownloadZipTask { def createRow[F[_]: Sync]( summary: DownloadSummary, - cid: Ident, + cid: CollectiveId, file: FileKey ): F[RDownloadQuery] = Timestamp.current[F].map { now => diff --git a/modules/joex/src/main/scala/docspell/joex/emptytrash/EmptyTrashTask.scala b/modules/joex/src/main/scala/docspell/joex/emptytrash/EmptyTrashTask.scala index 177e961b..536fd83f 100644 --- a/modules/joex/src/main/scala/docspell/joex/emptytrash/EmptyTrashTask.scala +++ b/modules/joex/src/main/scala/docspell/joex/emptytrash/EmptyTrashTask.scala @@ -68,7 +68,7 @@ object EmptyTrashTask { .foldMonoid private def deleteChunk[F[_]: Async]( - collective: Ident, + collective: CollectiveId, itemOps: OItem[F], ctx: Context[F, _] )(chunk: Vector[RItem]): F[Int] = diff --git a/modules/joex/src/main/scala/docspell/joex/fts/FtsWork.scala b/modules/joex/src/main/scala/docspell/joex/fts/FtsWork.scala index 3f5ed2a6..32f83e10 100644 --- a/modules/joex/src/main/scala/docspell/joex/fts/FtsWork.scala +++ b/modules/joex/src/main/scala/docspell/joex/fts/FtsWork.scala @@ -74,10 +74,10 @@ 
object FtsWork { def log[F[_]](f: Logger[F] => F[Unit]): FtsWork[F] = FtsWork(ctx => f(ctx.logger)) - def clearIndex[F[_]: FlatMap](coll: Option[Ident]): FtsWork[F] = + def clearIndex[F[_]: FlatMap](coll: Option[CollectiveId]): FtsWork[F] = coll match { case Some(cid) => - log[F](_.debug(s"Clearing index data for collective '${cid.id}'")) ++ FtsWork( + log[F](_.debug(s"Clearing index data for collective '${cid.value}'")) ++ FtsWork( ctx => ctx.fts.clear(ctx.logger, cid) ) case None => @@ -86,7 +86,7 @@ object FtsWork { ) } - def insertAll[F[_]: FlatMap](coll: Option[Ident]): FtsWork[F] = + def insertAll[F[_]: FlatMap](coll: Option[CollectiveId]): FtsWork[F] = log[F](_.info("Inserting all data to index")) ++ FtsWork .all( FtsWork(ctx => diff --git a/modules/joex/src/main/scala/docspell/joex/fts/MigrationTask.scala b/modules/joex/src/main/scala/docspell/joex/fts/MigrationTask.scala index ddb44a21..62cdc0e3 100644 --- a/modules/joex/src/main/scala/docspell/joex/fts/MigrationTask.scala +++ b/modules/joex/src/main/scala/docspell/joex/fts/MigrationTask.scala @@ -8,11 +8,11 @@ package docspell.joex.fts import cats.effect._ import cats.implicits._ - import docspell.backend.fulltext.CreateIndex import docspell.common._ import docspell.ftsclient._ import docspell.joex.Config +import docspell.scheduler.usertask.UserTaskScope import docspell.scheduler.{Job, Task} import docspell.store.Store @@ -43,10 +43,9 @@ object MigrationTask { Job .createNew( taskName, - DocspellSystem.taskGroup, + UserTaskScope.system, (), "Create full-text index", - DocspellSystem.taskGroup, Priority.Low, Some(DocspellSystem.migrationTaskTracker) ) diff --git a/modules/joex/src/main/scala/docspell/joex/fts/ReIndexTask.scala b/modules/joex/src/main/scala/docspell/joex/fts/ReIndexTask.scala index b457d2fb..6f9885cb 100644 --- a/modules/joex/src/main/scala/docspell/joex/fts/ReIndexTask.scala +++ b/modules/joex/src/main/scala/docspell/joex/fts/ReIndexTask.scala @@ -40,7 +40,7 @@ object ReIndexTask { def 
onCancel[F[_]]: Task[F, Args, Unit] = Task.log[F, Args](_.warn("Cancelling full-text re-index task")) - private def clearData[F[_]: Async](collective: Option[Ident]): FtsWork[F] = + private def clearData[F[_]: Async](collective: Option[CollectiveId]): FtsWork[F] = FtsWork.log[F](_.info("Clearing index data")) ++ (collective match { case Some(_) => diff --git a/modules/joex/src/main/scala/docspell/joex/learn/ClassifierName.scala b/modules/joex/src/main/scala/docspell/joex/learn/ClassifierName.scala index bce47659..3394d467 100644 --- a/modules/joex/src/main/scala/docspell/joex/learn/ClassifierName.scala +++ b/modules/joex/src/main/scala/docspell/joex/learn/ClassifierName.scala @@ -8,10 +8,8 @@ package docspell.joex.learn import cats.data.NonEmptyList import cats.implicits._ - -import docspell.common.Ident +import docspell.common.CollectiveId import docspell.store.records.{RClassifierModel, RClassifierSetting} - import doobie._ final class ClassifierName(val name: String) extends AnyVal @@ -37,12 +35,12 @@ object ClassifierName { val correspondentPerson: ClassifierName = apply("correspondentperson") - def findTagClassifiers[F[_]](coll: Ident): ConnectionIO[List[ClassifierName]] = + def findTagClassifiers(coll: CollectiveId): ConnectionIO[List[ClassifierName]] = for { categories <- RClassifierSetting.getActiveCategories(coll) } yield categories.map(tagCategory) - def findTagModels[F[_]](coll: Ident): ConnectionIO[List[RClassifierModel]] = + def findTagModels(coll: CollectiveId): ConnectionIO[List[RClassifierModel]] = for { categories <- RClassifierSetting.getActiveCategories(coll) models <- NonEmptyList.fromList(categories) match { @@ -53,7 +51,9 @@ object ClassifierName { } } yield models - def findOrphanTagModels[F[_]](coll: Ident): ConnectionIO[List[RClassifierModel]] = + def findOrphanTagModels( + coll: CollectiveId + ): ConnectionIO[List[RClassifierModel]] = for { cats <- RClassifierSetting.getActiveCategories(coll) allModels = 
RClassifierModel.findAllByQuery(coll, s"$categoryPrefix%") diff --git a/modules/joex/src/main/scala/docspell/joex/learn/Classify.scala b/modules/joex/src/main/scala/docspell/joex/learn/Classify.scala index 24244d6b..9812e619 100644 --- a/modules/joex/src/main/scala/docspell/joex/learn/Classify.scala +++ b/modules/joex/src/main/scala/docspell/joex/learn/Classify.scala @@ -26,7 +26,7 @@ object Classify { workingDir: Path, store: Store[F], classifier: TextClassifier[F], - coll: Ident, + coll: CollectiveId, text: String )(cname: ClassifierName): F[Option[String]] = (for { diff --git a/modules/joex/src/main/scala/docspell/joex/learn/LearnClassifierTask.scala b/modules/joex/src/main/scala/docspell/joex/learn/LearnClassifierTask.scala index 0de696c9..06e9ad83 100644 --- a/modules/joex/src/main/scala/docspell/joex/learn/LearnClassifierTask.scala +++ b/modules/joex/src/main/scala/docspell/joex/learn/LearnClassifierTask.scala @@ -48,7 +48,7 @@ object LearnClassifierTask { .learnAll( analyser, store, - ctx.args.collective, + ctx.args.collectiveId, cfg.classification.itemCount, cfg.maxLength ) @@ -69,7 +69,7 @@ object LearnClassifierTask { _ <- OptionT.liftF( LearnTags .learnAllTagCategories(analyser, store)( - ctx.args.collective, + ctx.args.collectiveId, maxItems, cfg.maxLength ) @@ -82,7 +82,7 @@ object LearnClassifierTask { clearObsoleteTagModels(ctx, store) *> // when tags are deleted, categories may get removed. fix the json array store - .transact(RClassifierSetting.fixCategoryList(ctx.args.collective)) + .transact(RClassifierSetting.fixCategoryList(ctx.args.collectiveId)) .map(_ => ()) } @@ -92,7 +92,7 @@ object LearnClassifierTask { ): F[Unit] = for { list <- store.transact( - ClassifierName.findOrphanTagModels(ctx.args.collective) + ClassifierName.findOrphanTagModels(ctx.args.collectiveId) ) _ <- ctx.logger.info( s"Found ${list.size} obsolete model files that are deleted now." 
@@ -110,7 +110,7 @@ object LearnClassifierTask { cfg: Config.TextAnalysis ): OptionT[F, OCollective.Classifier] = if (cfg.classification.enabled) - OptionT(store.transact(RClassifierSetting.findById(ctx.args.collective))) + OptionT(store.transact(RClassifierSetting.findById(ctx.args.collectiveId))) .filter(_.autoTagEnabled) .map(OCollective.Classifier.fromRecord) else diff --git a/modules/joex/src/main/scala/docspell/joex/learn/LearnItemEntities.scala b/modules/joex/src/main/scala/docspell/joex/learn/LearnItemEntities.scala index 19048abd..d394b536 100644 --- a/modules/joex/src/main/scala/docspell/joex/learn/LearnItemEntities.scala +++ b/modules/joex/src/main/scala/docspell/joex/learn/LearnItemEntities.scala @@ -21,7 +21,7 @@ object LearnItemEntities { def learnAll[F[_]: Async, A]( analyser: TextAnalyser[F], store: Store[F], - collective: Ident, + collective: CollectiveId, maxItems: Int, maxTextLen: Int ): Task[F, A, Unit] = @@ -35,7 +35,7 @@ object LearnItemEntities { def learnCorrOrg[F[_]: Async, A]( analyser: TextAnalyser[F], store: Store[F], - collective: Ident, + collective: CollectiveId, maxItems: Int, maxTextLen: Int ): Task[F, A, Unit] = @@ -47,7 +47,7 @@ object LearnItemEntities { def learnCorrPerson[F[_]: Async, A]( analyser: TextAnalyser[F], store: Store[F], - collective: Ident, + collective: CollectiveId, maxItems: Int, maxTextLen: Int ): Task[F, A, Unit] = @@ -59,7 +59,7 @@ object LearnItemEntities { def learnConcPerson[F[_]: Async, A]( analyser: TextAnalyser[F], store: Store[F], - collective: Ident, + collective: CollectiveId, maxItems: Int, maxTextLen: Int ): Task[F, A, Unit] = @@ -71,7 +71,7 @@ object LearnItemEntities { def learnConcEquip[F[_]: Async, A]( analyser: TextAnalyser[F], store: Store[F], - collective: Ident, + collective: CollectiveId, maxItems: Int, maxTextLen: Int ): Task[F, A, Unit] = @@ -83,7 +83,7 @@ object LearnItemEntities { private def learn[F[_]: Async, A]( store: Store[F], analyser: TextAnalyser[F], - collective: Ident + 
collective: CollectiveId )(cname: ClassifierName, data: Context[F, _] => Stream[F, Data]): Task[F, A, Unit] = Task { ctx => ctx.logger.info(s"Learn classifier ${cname.name}") *> diff --git a/modules/joex/src/main/scala/docspell/joex/learn/LearnTags.scala b/modules/joex/src/main/scala/docspell/joex/learn/LearnTags.scala index e80fe83c..9745f2aa 100644 --- a/modules/joex/src/main/scala/docspell/joex/learn/LearnTags.scala +++ b/modules/joex/src/main/scala/docspell/joex/learn/LearnTags.scala @@ -21,7 +21,7 @@ object LearnTags { def learnTagCategory[F[_]: Async, A]( analyser: TextAnalyser[F], store: Store[F], - collective: Ident, + collective: CollectiveId, maxItems: Int, maxTextLen: Int )( @@ -44,7 +44,7 @@ object LearnTags { } def learnAllTagCategories[F[_]: Async, A](analyser: TextAnalyser[F], store: Store[F])( - collective: Ident, + collective: CollectiveId, maxItems: Int, maxTextLen: Int ): Task[F, A, Unit] = diff --git a/modules/joex/src/main/scala/docspell/joex/learn/SelectItems.scala b/modules/joex/src/main/scala/docspell/joex/learn/SelectItems.scala index a54d7f7e..ed127ac2 100644 --- a/modules/joex/src/main/scala/docspell/joex/learn/SelectItems.scala +++ b/modules/joex/src/main/scala/docspell/joex/learn/SelectItems.scala @@ -20,7 +20,7 @@ object SelectItems { val pageSep = LearnClassifierTask.pageSep val noClass = LearnClassifierTask.noClass - def forCategory[F[_]](store: Store[F], collective: Ident)( + def forCategory[F[_]](store: Store[F], collective: CollectiveId)( maxItems: Int, category: String, maxTextLen: Int @@ -36,7 +36,7 @@ object SelectItems { def forCorrOrg[F[_]]( store: Store[F], - collective: Ident, + collective: CollectiveId, maxItems: Int, maxTextLen: Int ): Stream[F, Data] = { @@ -51,7 +51,7 @@ object SelectItems { def forCorrPerson[F[_]]( store: Store[F], - collective: Ident, + collective: CollectiveId, maxItems: Int, maxTextLen: Int ): Stream[F, Data] = { @@ -66,7 +66,7 @@ object SelectItems { def forConcPerson[F[_]]( store: Store[F], - 
collective: Ident, + collective: CollectiveId, maxItems: Int, maxTextLen: Int ): Stream[F, Data] = { @@ -81,7 +81,7 @@ object SelectItems { def forConcEquip[F[_]]( store: Store[F], - collective: Ident, + collective: CollectiveId, maxItems: Int, maxTextLen: Int ): Stream[F, Data] = { @@ -94,7 +94,10 @@ object SelectItems { store.transact(connStream) } - private def allItems(collective: Ident, max: Int): Stream[ConnectionIO, Ident] = { + private def allItems( + collective: CollectiveId, + max: Int + ): Stream[ConnectionIO, Ident] = { val limit = if (max <= 0) Batch.all else Batch.limit(max) QItem.findAllNewesFirst(collective, 10, limit) } diff --git a/modules/joex/src/main/scala/docspell/joex/learn/StoreClassifierModel.scala b/modules/joex/src/main/scala/docspell/joex/learn/StoreClassifierModel.scala index 9d5aafe9..b3ff3261 100644 --- a/modules/joex/src/main/scala/docspell/joex/learn/StoreClassifierModel.scala +++ b/modules/joex/src/main/scala/docspell/joex/learn/StoreClassifierModel.scala @@ -21,7 +21,7 @@ object StoreClassifierModel { def handleModel[F[_]: Async]( store: Store[F], logger: Logger[F], - collective: Ident, + collective: CollectiveId, modelName: ClassifierName )( trainedModel: ClassifierModel diff --git a/modules/joex/src/main/scala/docspell/joex/multiupload/MultiUploadArchiveTask.scala b/modules/joex/src/main/scala/docspell/joex/multiupload/MultiUploadArchiveTask.scala index 988dd1a0..56438c44 100644 --- a/modules/joex/src/main/scala/docspell/joex/multiupload/MultiUploadArchiveTask.scala +++ b/modules/joex/src/main/scala/docspell/joex/multiupload/MultiUploadArchiveTask.scala @@ -11,12 +11,12 @@ import cats.data.OptionT import cats.effect._ import cats.implicits._ import fs2.Stream - import docspell.backend.JobFactory import docspell.common._ import docspell.common.util.Zip import docspell.logging.Logger import docspell.scheduler._ +import docspell.scheduler.usertask.UserTaskScope import docspell.store.Store /** Task to submit multiple files at once. 
By default, one file in an upload results in @@ -90,7 +90,7 @@ object MultiUploadArchiveTask { submitter = currentJob.map(_.submitter).getOrElse(DocspellSystem.user) job <- JobFactory.processItem( args, - AccountId(ctx.args.meta.collective, submitter), + UserTaskScope(ctx.args.meta.collective, submitter.some), prio, None ) diff --git a/modules/joex/src/main/scala/docspell/joex/notify/PeriodicDueItemsTask.scala b/modules/joex/src/main/scala/docspell/joex/notify/PeriodicDueItemsTask.scala index 3f2576aa..2c8b2c33 100644 --- a/modules/joex/src/main/scala/docspell/joex/notify/PeriodicDueItemsTask.scala +++ b/modules/joex/src/main/scala/docspell/joex/notify/PeriodicDueItemsTask.scala @@ -53,7 +53,12 @@ object PeriodicDueItemsTask { def withChannel[F[_]: Sync](ctx: Context[F, Args], ops: ONotification[F])( cont: Vector[NotificationChannel] => F[Unit] ): F[Unit] = - TaskOperations.withChannel(ctx.logger, ctx.args.channels, ctx.args.account, ops)(cont) + TaskOperations.withChannel( + ctx.logger, + ctx.args.channels, + ctx.args.account.userId, + ops + )(cont) def withItems[F[_]: Sync]( ctx: Context[F, Args], diff --git a/modules/joex/src/main/scala/docspell/joex/notify/PeriodicQueryTask.scala b/modules/joex/src/main/scala/docspell/joex/notify/PeriodicQueryTask.scala index fbfa127f..fe0e4b9b 100644 --- a/modules/joex/src/main/scala/docspell/joex/notify/PeriodicQueryTask.scala +++ b/modules/joex/src/main/scala/docspell/joex/notify/PeriodicQueryTask.scala @@ -54,7 +54,12 @@ object PeriodicQueryTask { def withChannel[F[_]: Sync](ctx: Context[F, Args], ops: ONotification[F])( cont: Vector[NotificationChannel] => F[Unit] ): F[Unit] = - TaskOperations.withChannel(ctx.logger, ctx.args.channels, ctx.args.account, ops)(cont) + TaskOperations.withChannel( + ctx.logger, + ctx.args.channels, + ctx.args.account.userId, + ops + )(cont) private def queryString(q: ItemQuery.Expr) = ItemQueryParser.asString(q) @@ -64,7 +69,10 @@ object PeriodicQueryTask { ): F[Unit] = { def fromBookmark(id: 
String) = store - .transact(RQueryBookmark.findByNameOrId(ctx.args.account, id)) + .transact( + RQueryBookmark + .findByNameOrId(ctx.args.account.collectiveId, ctx.args.account.userId, id) + ) .map(_.map(_.query)) .flatTap(q => ctx.logger.debug(s"Loaded bookmark '$id': ${q.map(_.expr).map(queryString)}") @@ -72,7 +80,9 @@ object PeriodicQueryTask { def fromShare(id: String) = store - .transact(RShare.findOneByCollective(ctx.args.account.collective, Some(true), id)) + .transact( + RShare.findOneByCollective(ctx.args.account.collectiveId, Some(true), id) + ) .map(_.map(_.query)) .flatTap(q => ctx.logger.debug(s"Loaded share '$id': ${q.map(_.expr).map(queryString)}") diff --git a/modules/joex/src/main/scala/docspell/joex/notify/TaskOperations.scala b/modules/joex/src/main/scala/docspell/joex/notify/TaskOperations.scala index da91c374..029302bf 100644 --- a/modules/joex/src/main/scala/docspell/joex/notify/TaskOperations.scala +++ b/modules/joex/src/main/scala/docspell/joex/notify/TaskOperations.scala @@ -25,24 +25,24 @@ trait TaskOperations { def withChannel[F[_]: Sync]( logger: Logger[F], channelsIn: NonEmptyList[ChannelRef], - accountId: AccountId, + userId: Ident, ops: ONotification[F] )( cont: Vector[NotificationChannel] => F[Unit] ): F[Unit] = { val channels = - channelsIn.toList.toVector.flatTraverse(ops.findNotificationChannel(_, accountId)) + channelsIn.toList.toVector.flatTraverse(ops.findNotificationChannel(_, userId)) channels.flatMap { ch => if (ch.isEmpty) - logger.error(s"No channels found for the given data: ${channelsIn}") + logger.error(s"No channels found for the given data: $channelsIn") else cont(ch) } } def withEventContext[F[_]]( logger: Logger[F], - account: AccountId, + account: AccountInfo, baseUrl: Option[LenientUri], items: Vector[ListItem], contentStart: Option[String], diff --git a/modules/joex/src/main/scala/docspell/joex/pagecount/AllPageCountTask.scala b/modules/joex/src/main/scala/docspell/joex/pagecount/AllPageCountTask.scala index 
1620b87f..cbb3d245 100644 --- a/modules/joex/src/main/scala/docspell/joex/pagecount/AllPageCountTask.scala +++ b/modules/joex/src/main/scala/docspell/joex/pagecount/AllPageCountTask.scala @@ -9,10 +9,10 @@ package docspell.joex.pagecount import cats.effect._ import cats.implicits._ import fs2.{Chunk, Stream} - import docspell.backend.JobFactory import docspell.common._ import docspell.scheduler._ +import docspell.scheduler.usertask.UserTaskScope import docspell.store.Store import docspell.store.records.RAttachment @@ -51,12 +51,12 @@ object AllPageCountTask { .compile .foldMonoid - private def findAttachments[F[_]] = + private def findAttachments = RAttachment.findAllWithoutPageCount(50) private def createJobs[F[_]: Sync](ras: Chunk[RAttachment]): Stream[F, Job[String]] = { def mkJob(ra: RAttachment): F[Job[MakePageCountArgs]] = - JobFactory.makePageCount(MakePageCountArgs(ra.id), None) + JobFactory.makePageCount(MakePageCountArgs(ra.id), UserTaskScope.system) val jobs = ras.traverse(mkJob) Stream.evalUnChunk(jobs).map(_.encode) @@ -66,10 +66,9 @@ object AllPageCountTask { Job .createNew( AllPageCountTask.taskName, - DocspellSystem.taskGroup, + UserTaskScope.system, (), "Create all page-counts", - DocspellSystem.taskGroup, Priority.Low, Some(DocspellSystem.allPageCountTaskTracker) ) diff --git a/modules/joex/src/main/scala/docspell/joex/pdfconv/ConvertAllPdfTask.scala b/modules/joex/src/main/scala/docspell/joex/pdfconv/ConvertAllPdfTask.scala index 607da1f9..c1a8eeab 100644 --- a/modules/joex/src/main/scala/docspell/joex/pdfconv/ConvertAllPdfTask.scala +++ b/modules/joex/src/main/scala/docspell/joex/pdfconv/ConvertAllPdfTask.scala @@ -9,9 +9,9 @@ package docspell.joex.pdfconv import cats.effect._ import cats.implicits._ import fs2.{Chunk, Stream} - import docspell.common._ import docspell.scheduler._ +import docspell.scheduler.usertask.UserTaskScope import docspell.store.Store import docspell.store.records.RAttachment @@ -55,7 +55,10 @@ object ConvertAllPdfTask { 
private def createJobs[F[_]: Sync]( ctx: Context[F, Args] )(ras: Chunk[RAttachment]): Stream[F, Job[String]] = { - val collectiveOrSystem = ctx.args.collective.getOrElse(DocspellSystem.taskGroup) + val collectiveOrSystem = + ctx.args.collective + .map(UserTaskScope.collective) + .getOrElse(UserTaskScope.system) def mkJob(ra: RAttachment): F[Job[PdfConvTask.Args]] = Job.createNew( @@ -63,7 +66,6 @@ object ConvertAllPdfTask { collectiveOrSystem, PdfConvTask.Args(ra.id), s"Convert pdf ${ra.id.id}/${ra.name.getOrElse("-")}", - collectiveOrSystem, Priority.Low, Some(PdfConvTask.taskName / ra.id) ) diff --git a/modules/joex/src/main/scala/docspell/joex/preview/AllPreviewsTask.scala b/modules/joex/src/main/scala/docspell/joex/preview/AllPreviewsTask.scala index c50d64a1..75c75d23 100644 --- a/modules/joex/src/main/scala/docspell/joex/preview/AllPreviewsTask.scala +++ b/modules/joex/src/main/scala/docspell/joex/preview/AllPreviewsTask.scala @@ -9,11 +9,11 @@ package docspell.joex.preview import cats.effect._ import cats.implicits._ import fs2.{Chunk, Stream} - import docspell.backend.JobFactory import docspell.common.MakePreviewArgs.StoreMode import docspell.common._ import docspell.scheduler._ +import docspell.scheduler.usertask.UserTaskScope import docspell.store.Store import docspell.store.records.RAttachment @@ -64,15 +64,12 @@ object AllPreviewsTask { private def createJobs[F[_]: Sync]( ctx: Context[F, Args] )(ras: Chunk[RAttachment]): Stream[F, Job[MakePreviewArgs]] = { - val collectiveOrSystem = { - val cid = ctx.args.collective.getOrElse(DocspellSystem.taskGroup) - AccountId(cid, DocspellSystem.user) - } - def mkJob(ra: RAttachment): F[Job[MakePreviewArgs]] = JobFactory.makePreview( MakePreviewArgs(ra.id, ctx.args.storeMode), - collectiveOrSystem.some + ctx.args.collective + .map(UserTaskScope.collective) + .getOrElse(UserTaskScope.system) ) val jobs = ras.traverse(mkJob) @@ -81,8 +78,9 @@ object AllPreviewsTask { def job[F[_]: Sync]( storeMode: 
MakePreviewArgs.StoreMode, - cid: Option[Ident] + cid: Option[CollectiveId] ): F[Job[String]] = - JobFactory.allPreviews(AllPreviewsArgs(cid, storeMode), None).map(_.encode) - + JobFactory + .allPreviews(AllPreviewsArgs(cid, storeMode), UserTaskScope.system) + .map(_.encode) } diff --git a/modules/joex/src/main/scala/docspell/joex/process/AttachmentPreview.scala b/modules/joex/src/main/scala/docspell/joex/process/AttachmentPreview.scala index 98a2923b..4426d741 100644 --- a/modules/joex/src/main/scala/docspell/joex/process/AttachmentPreview.scala +++ b/modules/joex/src/main/scala/docspell/joex/process/AttachmentPreview.scala @@ -75,7 +75,7 @@ object AttachmentPreview { private def createRecord[F[_]: Sync]( store: Store[F], - collective: Ident, + collective: CollectiveId, png: Stream[F, Byte], ra: RAttachment ): F[RAttachmentPreview] = { diff --git a/modules/joex/src/main/scala/docspell/joex/process/TextAnalysis.scala b/modules/joex/src/main/scala/docspell/joex/process/TextAnalysis.scala index 3a8ab6aa..006a80ef 100644 --- a/modules/joex/src/main/scala/docspell/joex/process/TextAnalysis.scala +++ b/modules/joex/src/main/scala/docspell/joex/process/TextAnalysis.scala @@ -81,7 +81,7 @@ object TextAnalysis { labels <- analyser.annotate( ctx.logger, sett, - ctx.args.meta.collective, + ctx.args.meta.collective.valueAsIdent, rm.content.getOrElse("") ) } yield (rm.copy(nerlabels = labels.all.toList), AttachmentDates(rm, labels.dates)) diff --git a/modules/joex/src/main/scala/docspell/joex/process/TextExtraction.scala b/modules/joex/src/main/scala/docspell/joex/process/TextExtraction.scala index 52ce8601..420e1d38 100644 --- a/modules/joex/src/main/scala/docspell/joex/process/TextExtraction.scala +++ b/modules/joex/src/main/scala/docspell/joex/process/TextExtraction.scala @@ -50,7 +50,7 @@ object TextExtraction { None, ctx.args.meta.language ) - _ <- fts.indexData(ctx.logger, (idxItem +: txt.map(_.td)): _*) + _ <- fts.indexData(ctx.logger, idxItem +: txt.map(_.td): _*) dur 
<- start extractedTags = txt.flatMap(_.tags).distinct.toList _ <- ctx.logger.info(s"Text extraction finished in ${dur.formatExact}.") @@ -71,7 +71,7 @@ object TextExtraction { store: Store[F], cfg: ExtractConfig, lang: Language, - collective: Ident, + collective: CollectiveId, item: ItemData )(ra: RAttachment): F[Result] = { def makeTextData(pair: (RAttachmentMeta, List[String])): Result = diff --git a/modules/joex/src/main/scala/docspell/joex/scanmailbox/ScanMailboxTask.scala b/modules/joex/src/main/scala/docspell/joex/scanmailbox/ScanMailboxTask.scala index cd0fb02b..7add6baf 100644 --- a/modules/joex/src/main/scala/docspell/joex/scanmailbox/ScanMailboxTask.scala +++ b/modules/joex/src/main/scala/docspell/joex/scanmailbox/ScanMailboxTask.scala @@ -42,15 +42,15 @@ object ScanMailboxTask { Task { ctx => for { _ <- ctx.logger.info( - s"=== Start importing mails for user ${ctx.args.account.user.id}" + s"=== Start importing mails for user ${ctx.args.account.login.id}" ) _ <- ctx.logger.debug(s"Settings: ${ctx.args.asJson.noSpaces}") mailCfg <- getMailSettings(ctx, store) folders = ctx.args.folders.mkString(", ") - userId = ctx.args.account.user + login = ctx.args.account.login imapConn = ctx.args.imapConnection _ <- ctx.logger.info( - s"Reading mails for user ${userId.id} from ${imapConn.id}/$folders" + s"Reading mails for user ${login.id} from ${imapConn.id}/$folders" ) _ <- importMails(cfg, mailCfg, emil, upload, joex, ctx, store) } yield () @@ -61,7 +61,7 @@ object ScanMailboxTask { def getMailSettings[F[_]: Sync](ctx: Context[F, Args], store: Store[F]): F[RUserImap] = store - .transact(RUserImap.getByName(ctx.args.account, ctx.args.imapConnection)) + .transact(RUserImap.getByName(ctx.args.account.userId, ctx.args.imapConnection)) .flatMap { case Some(c) => c.pure[F] case None => @@ -234,13 +234,13 @@ object ScanMailboxTask { ctx.logger.debug("Not matching on subjects. 
No filter given") *> headers.pure[F] } - def filterMessageIds[C](headers: Vector[MailHeaderItem]): F[Vector[MailHeaderItem]] = + def filterMessageIds(headers: Vector[MailHeaderItem]): F[Vector[MailHeaderItem]] = NonEmptyList.fromFoldable(headers.flatMap(_.mh.messageId)) match { case Some(nl) => for { archives <- store.transact( RAttachmentArchive - .findByMessageIdAndCollective(nl, ctx.args.account.collective) + .findByMessageIdAndCollective(nl, ctx.args.account.collectiveId) ) existing = archives.flatMap(_.messageId).toSet mails <- headers @@ -265,7 +265,7 @@ object ScanMailboxTask { store.transact( QOrganization .findPersonByContact( - ctx.args.account.collective, + ctx.args.account.collectiveId, from.address, Some(ContactKind.Email), Some(NonEmptyList.of(PersonUse.concerning)) @@ -320,7 +320,7 @@ object ScanMailboxTask { dir <- getDirection(mail.header) meta = OUpload.UploadMeta( Some(dir), - s"mailbox-${ctx.args.account.user.id}", + s"mailbox-${ctx.args.account.login.id}", args.itemFolder, Seq.empty, true, @@ -337,7 +337,12 @@ object ScanMailboxTask { priority = Priority.Low, tracker = None ) - res <- upload.submit(data, ctx.args.account, None) + res <- upload.submit( + data, + ctx.args.account.collectiveId, + ctx.args.account.userId.some, + None + ) } yield res } diff --git a/modules/joex/src/main/scala/docspell/joex/updatecheck/UpdateCheckTask.scala b/modules/joex/src/main/scala/docspell/joex/updatecheck/UpdateCheckTask.scala index 39dfbbbc..ce9f436e 100644 --- a/modules/joex/src/main/scala/docspell/joex/updatecheck/UpdateCheckTask.scala +++ b/modules/joex/src/main/scala/docspell/joex/updatecheck/UpdateCheckTask.scala @@ -9,13 +9,11 @@ package docspell.joex.updatecheck import cats.data.OptionT import cats.effect._ import cats.implicits._ - import docspell.common._ import docspell.scheduler.Task import docspell.scheduler.usertask.UserTask import docspell.store.Store -import docspell.store.records.RUserEmail - +import docspell.store.records.{RUser, RUserEmail} 
import emil._ object UpdateCheckTask { @@ -83,7 +81,9 @@ object UpdateCheckTask { store: Store[F], cfg: UpdateCheckConfig ): F[RUserEmail] = - OptionT(store.transact(RUserEmail.getByName(cfg.senderAccount, cfg.smtpId))) + OptionT(store.transact(RUser.findByAccount(cfg.senderAccount))) + .map(_.uid) + .flatMap(uid => OptionT(store.transact(RUserEmail.getByName(uid, cfg.smtpId)))) .getOrElseF( Sync[F].raiseError( new Exception( diff --git a/modules/notification/impl/src/test/scala/docspell/notification/impl/context/TagsChangedCtxTest.scala b/modules/notification/impl/src/test/scala/docspell/notification/impl/context/TagsChangedCtxTest.scala index fe72545b..36bbd942 100644 --- a/modules/notification/impl/src/test/scala/docspell/notification/impl/context/TagsChangedCtxTest.scala +++ b/modules/notification/impl/src/test/scala/docspell/notification/impl/context/TagsChangedCtxTest.scala @@ -18,7 +18,7 @@ import munit._ class TagsChangedCtxTest extends FunSuite { val url = LenientUri.unsafe("http://test") - val account = AccountId(id("user2"), id("user2")) + val account = AccountInfo(CollectiveId(1), id("user2"), id("user-abc-def"), id("user2")) val tag = Tag(id("a-b-1"), "tag-red", Some("doctype")) val item = Item( id = id("item-1"), diff --git a/modules/store/src/main/scala/db/migration/data/DownloadZipArgs.scala b/modules/store/src/main/scala/db/migration/data/DownloadZipArgs.scala new file mode 100644 index 00000000..974f76ea --- /dev/null +++ b/modules/store/src/main/scala/db/migration/data/DownloadZipArgs.scala @@ -0,0 +1,58 @@ +/* + * Copyright 2020 Eike K. 
& Contributors + * + * SPDX-License-Identifier: AGPL-3.0-or-later + */ + +package db.migration.data + +import db.migration.data.DownloadZipArgs.DownloadRequest +import docspell.common._ +import docspell.query.ItemQuery.Expr.ValidItemStates +import docspell.query.{ItemQuery, ItemQueryParser} +import docspell.store.queries.Query +import io.circe.generic.semiauto.{deriveDecoder, deriveEncoder} +import io.circe.{Decoder, Encoder} + +/** @deprecated replaced with a version using `AccountInfo` */ +final case class DownloadZipArgs(accountId: AccountId, req: DownloadRequest) + +object DownloadZipArgs { + val taskName: Ident = Ident.unsafe("download-query-zip") + + final case class DownloadRequest( + query: ItemQuery, + fileType: DownloadAllType, + maxFiles: Int, + maxSize: ByteSize + ) { + def toQuery(account: AccountInfo): Query = + Query + .all(account) + .withFix(_.andQuery(ValidItemStates)) + .withCond(_ => Query.QueryExpr(query.expr)) + + def itemQueryString = + ItemQueryParser.asString(query.expr) + } + object DownloadRequest { + implicit val itemQueryDecoder: Decoder[ItemQuery] = + Decoder.decodeString.emap(str => ItemQueryParser.parse(str).left.map(_.render)) + + implicit val itemQueryEncoder: Encoder[ItemQuery] = + Encoder.encodeString.contramap(q => + q.raw.getOrElse(ItemQueryParser.unsafeAsString(q.expr)) + ) + + implicit val jsonDecoder: Decoder[DownloadRequest] = + deriveDecoder + + implicit val jsonEncoder: Encoder[DownloadRequest] = + deriveEncoder + } + + implicit val jsonEncoder: Encoder[DownloadZipArgs] = + deriveEncoder + implicit val jsonDecoder: Decoder[DownloadZipArgs] = + deriveDecoder +} diff --git a/modules/store/src/main/scala/docspell/store/records/RNotificationChannel.scala b/modules/store/src/main/scala/docspell/store/records/RNotificationChannel.scala index 4eb741f3..31802d93 100644 --- a/modules/store/src/main/scala/docspell/store/records/RNotificationChannel.scala +++ 
b/modules/store/src/main/scala/docspell/store/records/RNotificationChannel.scala @@ -202,7 +202,7 @@ object RNotificationChannel { s"Looking up user smtp for ${userId.id} and ${conn.id}" ) ) - mailConn <- OptionT(RUserEmail.getByUser(userId, conn)) + mailConn <- OptionT(RUserEmail.getByName(userId, conn)) rec = RNotificationChannelMail( id, userId, diff --git a/modules/store/src/main/scala/docspell/store/records/RUserEmail.scala b/modules/store/src/main/scala/docspell/store/records/RUserEmail.scala index cd3e2a53..22995f1d 100644 --- a/modules/store/src/main/scala/docspell/store/records/RUserEmail.scala +++ b/modules/store/src/main/scala/docspell/store/records/RUserEmail.scala @@ -175,13 +175,6 @@ object RUserEmail { run(select(t.all), from(t), t.uid === userId).query[RUserEmail].to[Vector] } - def getByUser(userId: Ident, name: Ident): ConnectionIO[Option[RUserEmail]] = { - val t = Table(None) - run(select(t.all), from(t), t.uid === userId && t.name === name) - .query[RUserEmail] - .option - } - private def findByAccount0( userId: Ident, nameQ: Option[String], diff --git a/modules/store/src/test/scala/docspell/store/fts/TempFtsOpsTest.scala b/modules/store/src/test/scala/docspell/store/fts/TempFtsOpsTest.scala index 9647d88f..e4876c74 100644 --- a/modules/store/src/test/scala/docspell/store/fts/TempFtsOpsTest.scala +++ b/modules/store/src/test/scala/docspell/store/fts/TempFtsOpsTest.scala @@ -57,10 +57,10 @@ class TempFtsOpsTest extends DatabaseTest { def prepareItems(store: Store[IO]) = for { - _ <- store.transact(RCollective.insert(makeCollective(CollectiveId(2)))) - _ <- store.transact(RUser.insert(makeUser(CollectiveId(2)))) + cid <- store.transact(RCollective.insert(makeCollective)) + _ <- store.transact(RUser.insert(makeUser(cid))) items = (0 until 200) - .map(makeItem(_, CollectiveId(2))) + .map(makeItem(_, cid)) .toList _ <- items.traverse(i => store.transact(RItem.insert(i))) } yield () @@ -184,9 +184,9 @@ class TempFtsOpsTest extends DatabaseTest { 
Timestamp(Instant.now) ) - def makeCollective(cid: CollectiveId): RCollective = + def makeCollective: RCollective = RCollective( - cid, + CollectiveId.unknown, DocspellSystem.account.collective, CollectiveState.Active, Language.English, From 66265d8455149500e2cdad71821ee7f3526d9f90 Mon Sep 17 00:00:00 2001 From: eikek Date: Fri, 5 Aug 2022 13:33:06 +0200 Subject: [PATCH 07/15] Migrate file_id columns --- .../h2/V1.39.1__file_meta_migration.sql | 94 ++++++++++++++++++ .../mariadb/V1.39.0__collective_id.sql | 17 +--- .../mariadb/V1.39.1__file_meta_migration.sql | 92 ++++++++++++++++++ .../postgresql/V1.39.0__collective_id.sql | 3 +- .../V1.39.1__file_meta_migration.sql | 95 +++++++++++++++++++ 5 files changed, 284 insertions(+), 17 deletions(-) create mode 100644 modules/store/src/main/resources/db/migration/h2/V1.39.1__file_meta_migration.sql create mode 100644 modules/store/src/main/resources/db/migration/mariadb/V1.39.1__file_meta_migration.sql create mode 100644 modules/store/src/main/resources/db/migration/postgresql/V1.39.1__file_meta_migration.sql diff --git a/modules/store/src/main/resources/db/migration/h2/V1.39.1__file_meta_migration.sql b/modules/store/src/main/resources/db/migration/h2/V1.39.1__file_meta_migration.sql new file mode 100644 index 00000000..bcad45aa --- /dev/null +++ b/modules/store/src/main/resources/db/migration/h2/V1.39.1__file_meta_migration.sql @@ -0,0 +1,94 @@ +-- drop constraints to be able to update file ids +alter table "addon_archive" drop constraint "CONSTRAINT_8B982"; +alter table "attachment_archive" drop constraint "CONSTRAINT_13E"; +alter table "attachment" drop constraint "CONSTRAINT_8AF"; +alter table "attachment_source" drop constraint "CONSTRAINT_698"; +alter table "classifier_model" drop constraint "CONSTRAINT_BC7B"; +alter table "download_query" drop constraint "CONSTRAINT_3ABF"; +alter table "attachment_preview" drop constraint "CONSTRAINT_2D8"; + +-- create temporary tables holding old and new ids +create table 
"temp_prefixes"( + old_prefix varchar(255) not null primary key, + new_prefix varchar(255) not null +); +insert into "temp_prefixes" +select concat(name, '/'), concat(id, '/') from collective; + +create table "temp_file_ids"( + old_id varchar(255) not null primary key, + old_prefix varchar(255) not null, + new_prefix varchar(255) not null, + new_id varchar(255) not null +); + +with ids_orig(old_id, prefix) as + (select file_id, substring(file_id, 0, position('/' in file_id)) + from filemeta fm + ) +insert into "temp_file_ids" +select fm.old_id, tp.old_prefix, tp.new_prefix, replace(fm.old_id, tp.old_prefix, tp.new_prefix) as new_id +from ids_orig fm +inner join "temp_prefixes" tp on fm.prefix = tp.old_prefix; + +-- remove orphaned files and chunks +delete from filemeta +where "file_id" not in (select "old_id" from "temp_file_ids"); + +delete from filechunk +where "file_id" not in (select "old_id" from "temp_file_ids"); + +-- update all references +update "filemeta" fm set "file_id" = + (select t.new_id from "temp_file_ids" t where t."old_id" = fm."file_id"); + +update "addon_archive" aa set "file_id" = + (select t.new_id from "temp_file_ids" t where t."old_id" = aa."file_id"); + +update "attachment_archive" aa set "file_id" = + (select t.new_id from "temp_file_ids" t where t."old_id" = aa."file_id"); + +update "attachment" a set "filemetaid" = + (select t.new_id from "temp_file_ids" t where t."old_id" = a."filemetaid"); + +update "attachment_source" a set "file_id" = + (select t.new_id from "temp_file_ids" t where t."old_id" = a."file_id"); + +update "classifier_model" cm set "file_id" = + (select t.new_id from "temp_file_ids" t where t."old_id" = cm."file_id"); + +update "download_query" dq set "file_id" = + (select t.new_id from "temp_file_ids" t where t."old_id" = dq."file_id"); + +update "attachment_preview" ap set "file_id" = + (select t.new_id from "temp_file_ids" t where t."old_id" = ap."file_id"); + +-- update filechunks +update "filechunk" fc set "file_id" 
= + (select t.new_id from "temp_file_ids" t where t."old_id" = fc."file_id"); + +-- re-create the constraints +alter table "addon_archive" add constraint "addon_archive_file_id_fkey" +foreign key ("file_id") references "filemeta"("file_id"); + +alter table "attachment_archive" add constraint "attachment_archive_file_id_fkey" +foreign key ("file_id") references "filemeta"("file_id"); + +alter table "attachment" add constraint "attachment_filemetaid_fkey" +foreign key ("filemetaid") references "filemeta"("file_id"); + +alter table "attachment_source" add constraint "attachment_source_file_id_fkey" +foreign key ("file_id") references "filemeta"("file_id"); + +alter table "classifier_model" add constraint "classifier_model_file_id_fkey" +foreign key ("file_id") references "filemeta"("file_id"); + +alter table "download_query" add constraint "download_query_file_id_fkey" +foreign key ("file_id") references "filemeta"("file_id"); + +alter table "attachment_preview" add constraint "attachment_preview_file_id_fkey" +foreign key ("file_id") references "filemeta"("file_id"); + +-- drop temporary tables +drop table "temp_file_ids"; +drop table "temp_prefixes"; diff --git a/modules/store/src/main/resources/db/migration/mariadb/V1.39.0__collective_id.sql b/modules/store/src/main/resources/db/migration/mariadb/V1.39.0__collective_id.sql index aa6616ea..4732ec83 100644 --- a/modules/store/src/main/resources/db/migration/mariadb/V1.39.0__collective_id.sql +++ b/modules/store/src/main/resources/db/migration/mariadb/V1.39.0__collective_id.sql @@ -1,6 +1,5 @@ -- add new id column alter table `collective` add column `id` int auto_increment not null unique; -create unique index `collective_id_idx` on `collective`(`id`); -- change references: source alter table `source` add column `coll_id` int not null default 0; @@ -17,7 +16,6 @@ alter table `source` alter column `coll_id` drop default; alter table `tag` add column `coll_id` int not null default 0; update `tag` t set `coll_id` = 
(select id from collective where `cid` = t.`cid`); create index `tag_coll_id_idx` on `tag`(`coll_id`); -create unique index `tag_coll_id_name_idx` on `tag`(`coll_id`, `name`); alter table `tag` add constraint `tag_coll_id_name_key` unique(`coll_id`, `name`); alter table `tag` add constraint `tag_coll_id_fkey` foreign key (`coll_id`) references `collective`(`id`); alter table `tag` drop constraint `tag_ibfk_1`; @@ -29,7 +27,6 @@ alter table `tag` alter column `coll_id` drop default; alter table `user_` add column `coll_id` int not null default 0; update `user_` t set `coll_id` = (select id from collective where `cid` = t.`cid`); create index `user__coll_id_idx` on `user_`(`coll_id`); -create unique index `user__coll_id_login_idx` on `user_`(`coll_id`, `login`); alter table `user_` add constraint `user__coll_id_login_key` unique(`coll_id`, `login`); alter table `user_` add constraint `user__coll_id_fkey` foreign key (`coll_id`) references `collective`(`id`); alter table `user_` drop constraint `user__ibfk_1`; @@ -41,7 +38,6 @@ alter table `user_` alter column `coll_id` drop default; alter table `query_bookmark` add column `coll_id` int not null default 0; update `query_bookmark` t set `coll_id` = (select id from collective where `cid` = t.`cid`); create index `query_bookmark_coll_id_idx` on `query_bookmark`(`coll_id`); -create unique index `query_bookmark_coll_id__user_id_name_idx` on `query_bookmark`(`coll_id`, `__user_id`, `name`); alter table `query_bookmark` add constraint `query_bookmark_coll_id__user_id_name_key` unique(`coll_id`, `__user_id`, `name`); alter table `query_bookmark` add constraint `query_bookmark_coll_id_fkey` foreign key (`coll_id`) references `collective`(`id`); alter table `query_bookmark` drop constraint `query_bookmark_ibfk_2`; @@ -53,7 +49,6 @@ alter table `query_bookmark` alter column `coll_id` drop default; alter table `person` add column `coll_id` int not null default 0; update `person` t set `coll_id` = (select id from collective where 
`cid` = t.`cid`); create index `person_coll_id_idx` on `person`(`coll_id`); -create unique index `person_coll_id_name_idx` on `person`(`coll_id`, `name`); alter table `person` add constraint `person_coll_id_name_key` unique(`coll_id`, `name`); alter table `person` add constraint `person_coll_id_fkey` foreign key (`coll_id`) references `collective`(`id`); alter table `person` drop constraint `person_ibfk_1`; @@ -65,7 +60,6 @@ alter table `person` alter column `coll_id` drop default; alter table `organization` add column `coll_id` int not null default 0; update `organization` t set `coll_id` = (select id from collective where `cid` = t.`cid`); create index `organization_coll_id_idx` on `organization`(`coll_id`); -create unique index `organization_coll_id_name_idx` on `organization`(`coll_id`, `name`); alter table `organization` add constraint `organization_coll_id_name_key` unique(`coll_id`, `name`); alter table `organization` add constraint `organization_coll_id_fkey` foreign key (`coll_id`) references `collective`(`id`); alter table `organization` drop constraint `cid`; @@ -77,7 +71,6 @@ alter table `organization` alter column `coll_id` drop default; alter table `item_link` add column `coll_id` int not null default 0; update `item_link` t set `coll_id` = (select id from collective where `cid` = t.`cid`); create index `item_link_coll_id_idx` on `item_link`(`coll_id`); -create unique index `item_link_coll_id_item1_item2_idx` on `item_link`(`coll_id`, `item1`, `item2`); alter table `item_link` add constraint `item_link_coll_id_item1_item2_key` unique(`coll_id`, `item1`, `item2`); alter table `item_link` add constraint `item_link_coll_id_fkey` foreign key (`coll_id`) references `collective`(`id`); alter table `item_link` drop constraint `item_link_ibfk_1`; @@ -100,7 +93,6 @@ update `folder` t set `coll_id` = (select id from collective where `cid` = t.`ci alter table `folder` drop constraint `folder_ibfk_1`; alter table `folder` drop constraint `name`; create index 
`folder_coll_id_idx` on `folder`(`coll_id`); -create unique index `folder_coll_id_name_idx` on `folder`(`coll_id`, `name`); alter table `folder` add constraint `folder_coll_id_name_key` unique(`coll_id`, `name`); alter table `folder` add constraint `folder_coll_id_fkey` foreign key (`coll_id`) references `collective`(`id`); alter table `folder` drop column `cid`; @@ -110,7 +102,6 @@ alter table `folder` alter column `coll_id` drop default; alter table `equipment` add column `coll_id` int not null default 0; update `equipment` t set `coll_id` = (select id from collective where `cid` = t.`cid`); create index `equipment_coll_id_idx` on `equipment`(`coll_id`); -create unique index `equipment_coll_id_name_idx` on `equipment`(`coll_id`, `name`); alter table `equipment` add constraint `equipment_coll_id_name_key` unique(`coll_id`, `name`); alter table `equipment` add constraint `equipment_coll_id_fkey` foreign key (`coll_id`) references `collective`(`id`); alter table `equipment` drop constraint `equipment_ibfk_1`; @@ -124,7 +115,7 @@ alter table `empty_trash_setting` add column `coll_id` int not null default 0; update `empty_trash_setting` t set `coll_id` = (select id from collective where `cid` = t.`cid`); create index `empty_trash_setting_coll_id_idx` on `empty_trash_setting`(`coll_id`); alter table `empty_trash_setting` add constraint `empty_trash_setting_coll_id_fkey` -foreign key (`coll_id`) references `collective`(`id`); + foreign key (`coll_id`) references `collective`(`id`); alter table `empty_trash_setting` drop constraint `empty_trash_setting_ibfk_1`; alter table `empty_trash_setting` drop column `cid`; alter table `empty_trash_setting` alter column `coll_id` drop default; @@ -144,7 +135,6 @@ alter table `download_query` alter column `coll_id` drop default; alter table `custom_field` add column `coll_id` int not null default 0; update `custom_field` t set `coll_id` = (select id from collective where `cid` = t.`cid`); create index `custom_field_coll_id_idx` on 
`custom_field`(`coll_id`); -create unique index `custom_field_coll_id_name_idx` on `custom_field`(`coll_id`, `name`); alter table `custom_field` add constraint `custom_field_coll_id_name_key` unique(`coll_id`, `name`); alter table `custom_field` add constraint `custom_field_coll_id_fkey` foreign key (`coll_id`) references `collective`(`id`); alter table `custom_field` drop constraint `custom_field_ibfk_1`; @@ -166,7 +156,6 @@ alter table `collective_password` alter column `coll_id` drop default; alter table `client_settings_collective` add column `coll_id` int not null default 0; update `client_settings_collective` t set `coll_id` = (select id from collective where `cid` = t.`cid`); create index `client_settings_collective_coll_id_idx` on `client_settings_collective`(`coll_id`); -create unique index `client_settings_collective_coll_id_client_id_idx` on `client_settings_collective`(`coll_id`, `client_id`); alter table `client_settings_collective` add constraint `client_settings_collective_coll_id_name_key` unique(`coll_id`, `client_id`); alter table `client_settings_collective` add constraint `client_settings_collective_coll_id_fkey` foreign key (`coll_id`) references `collective`(`id`); alter table `client_settings_collective` drop constraint `client_settings_collective_ibfk_1`; @@ -188,7 +177,6 @@ alter table `classifier_setting` alter column `coll_id` drop default; alter table `classifier_model` add column `coll_id` int not null default 0; update `classifier_model` t set `coll_id` = (select id from collective where `cid` = t.`cid`); create index `classifier_model_coll_id_idx` on `classifier_model`(`coll_id`); -create unique index `classifier_model_coll_id_name_idx` on `classifier_model`(`coll_id`, `name`); alter table `classifier_model` add constraint `classifier_model_coll_id_name_key` unique(`coll_id`, `name`); alter table `classifier_model` add constraint `classifier_model_coll_id_fkey` foreign key (`coll_id`) references `collective`(`id`); alter table 
`classifier_model` drop constraint `classifier_model_ibfk_1`; @@ -216,8 +204,6 @@ alter table `addon_archive` drop constraint `addon_archive_ibfk_1`; alter table `addon_archive` drop index `addon_archive_cid_idx`; create index `addon_archive_coll_id_idx` on `addon_archive`(`coll_id`); -create unique index `addon_archive_coll_id_name_version_idx` on `addon_archive`(`coll_id`, `name`, `version`); -create unique index `addon_archive_coll_id_original_url_idx` on `addon_archive`(`coll_id`, `original_url`); alter table `addon_archive` add constraint `addon_archive_coll_id_name_version_key` unique(`coll_id`, `name`, `version`); alter table `addon_archive` add constraint `addon_archive_coll_id_original_url_key` unique(`coll_id`, `original_url`); alter table `addon_archive` add constraint `addon_archive_coll_id_fkey` foreign key (`coll_id`) references `collective`(`id`); @@ -228,5 +214,4 @@ alter table `addon_archive` alter column `coll_id` drop default; alter table `collective` drop primary key; alter table `collective` add constraint `collective_id_pkey` primary key (`id`); alter table `collective` rename column `cid` to `name`; -create unique index `collective_name_idx` on `collective`(`name`); alter table `collective` add constraint `collective_name_key` unique(`name`); diff --git a/modules/store/src/main/resources/db/migration/mariadb/V1.39.1__file_meta_migration.sql b/modules/store/src/main/resources/db/migration/mariadb/V1.39.1__file_meta_migration.sql new file mode 100644 index 00000000..f887cc54 --- /dev/null +++ b/modules/store/src/main/resources/db/migration/mariadb/V1.39.1__file_meta_migration.sql @@ -0,0 +1,92 @@ +-- drop constraints to be able to update file ids +alter table `addon_archive` drop constraint `addon_archive_ibfk_2`; +alter table `attachment_archive` drop constraint `attachment_archive_ibfk_1`; +alter table `attachment` drop constraint `attachment_ibfk_2`; +alter table `attachment_source` drop constraint `attachment_source_ibfk_1`; +alter table 
`classifier_model` drop constraint `classifier_model_ibfk_2`; +alter table `download_query` drop constraint `download_query_ibfk_2`; +alter table `attachment_preview` drop constraint `attachment_preview_ibfk_1`; + +-- create temporary tables holding old and new ids +create table `temp_prefixes`( + old_prefix varchar(255) not null primary key, + new_prefix varchar(255) not null +); +insert into `temp_prefixes` +select concat(name, '/'), concat(id, '/') from collective; + +create table `temp_file_ids`( + old_id varchar(255) not null primary key, + old_prefix varchar(255) not null, + new_prefix varchar(255) not null, + new_id varchar(255) not null +); + + +insert into `temp_file_ids` +select fm.old_id, tp.old_prefix, tp.new_prefix, replace(fm.old_id, tp.old_prefix, tp.new_prefix) as new_id +from + (select file_id as old_id, substring(file_id, 1, position('/' in file_id)) as prefix from filemeta) fm +inner join `temp_prefixes` tp on fm.prefix = tp.old_prefix; + +-- remove orphaned files and chunks +delete from filemeta +where `file_id` not in (select `old_id` from `temp_file_ids`); + +delete from filechunk +where `file_id` not in (select `old_id` from `temp_file_ids`); + +-- update all references +update `filemeta` fm set `file_id` = + (select t.new_id from `temp_file_ids` t where t.`old_id` = fm.`file_id`); + +update `addon_archive` aa set `file_id` = + (select t.new_id from `temp_file_ids` t where t.`old_id` = aa.`file_id`); + +update `attachment_archive` aa set `file_id` = + (select t.new_id from `temp_file_ids` t where t.`old_id` = aa.`file_id`); + +update `attachment` a set `filemetaid` = + (select t.new_id from `temp_file_ids` t where t.`old_id` = a.`filemetaid`); + +update `attachment_source` a set `file_id` = + (select t.new_id from `temp_file_ids` t where t.`old_id` = a.`file_id`); + +update `classifier_model` cm set `file_id` = + (select t.new_id from `temp_file_ids` t where t.`old_id` = cm.`file_id`); + +update `download_query` dq set `file_id` = + (select 
t.new_id from `temp_file_ids` t where t.`old_id` = dq.`file_id`); + +update `attachment_preview` ap set `file_id` = + (select t.new_id from `temp_file_ids` t where t.`old_id` = ap.`file_id`); + +-- update filechunks +update `filechunk` fc set `file_id` = + (select t.new_id from `temp_file_ids` t where t.`old_id` = fc.`file_id`); + +-- re-create the constraints +alter table `addon_archive` add constraint `addon_archive_file_id_fkey` +foreign key (`file_id`) references `filemeta`(`file_id`); + +alter table `attachment_archive` add constraint `attachment_archive_file_id_fkey` +foreign key (`file_id`) references `filemeta`(`file_id`); + +alter table `attachment` add constraint `attachment_filemetaid_fkey` +foreign key (`filemetaid`) references `filemeta`(`file_id`); + +alter table `attachment_source` add constraint `attachment_source_file_id_fkey` +foreign key (`file_id`) references `filemeta`(`file_id`); + +alter table `classifier_model` add constraint `classifier_model_file_id_fkey` +foreign key (`file_id`) references `filemeta`(`file_id`); + +alter table `download_query` add constraint `download_query_file_id_fkey` +foreign key (`file_id`) references `filemeta`(`file_id`); + +alter table `attachment_preview` add constraint `attachment_preview_file_id_fkey` +foreign key (`file_id`) references `filemeta`(`file_id`); + +-- drop temporary tables +drop table `temp_file_ids`; +drop table `temp_prefixes`; diff --git a/modules/store/src/main/resources/db/migration/postgresql/V1.39.0__collective_id.sql b/modules/store/src/main/resources/db/migration/postgresql/V1.39.0__collective_id.sql index 6cc6fe15..0a71b955 100644 --- a/modules/store/src/main/resources/db/migration/postgresql/V1.39.0__collective_id.sql +++ b/modules/store/src/main/resources/db/migration/postgresql/V1.39.0__collective_id.sql @@ -29,7 +29,8 @@ create index "user__coll_id_idx" on "user_"("coll_id"); create unique index "user__coll_id_login_idx" on "user_"("coll_id", "login"); alter table "user_" add 
constraint "user__coll_id_login_key" unique using index "user__coll_id_login_idx"; alter table "user_" add constraint "user__coll_id_fkey" foreign key ("coll_id") references "collective"("id"); -alter table "user_" drop constraint "user__cid_fkey"; +alter table "user_" drop constraint if exists "user__cid_fkey"; +alter table "user_" drop constraint if exists "user_cid_fkey"; alter table "user_" drop column "cid"; alter table "user_" alter column "coll_id" drop default; diff --git a/modules/store/src/main/resources/db/migration/postgresql/V1.39.1__file_meta_migration.sql b/modules/store/src/main/resources/db/migration/postgresql/V1.39.1__file_meta_migration.sql new file mode 100644 index 00000000..9216be55 --- /dev/null +++ b/modules/store/src/main/resources/db/migration/postgresql/V1.39.1__file_meta_migration.sql @@ -0,0 +1,95 @@ +-- drop constraints to be able to update file ids +alter table "addon_archive" drop constraint "addon_archive_file_id_fkey"; +alter table "attachment_archive" drop constraint "attachment_archive_file_id_fkey"; +alter table "attachment" drop constraint "attachment_filemetaid_fkey"; +alter table "attachment_source" drop constraint "attachment_source_file_id_fkey"; +alter table "classifier_model" drop constraint "classifier_model_file_id_fkey"; +alter table "download_query" drop constraint "download_query_file_id_fkey"; +alter table "attachment_preview" drop constraint "attachment_preview_file_id_fkey"; + +-- create temporary tables holding old and new ids +create table "temp_prefixes"( + old_prefix varchar(255) not null primary key, + new_prefix varchar(255) not null +); +insert into "temp_prefixes" +select concat(name, '/'), concat(id, '/') from collective; + +create table "temp_file_ids"( + old_id varchar(255) not null primary key, + old_prefix varchar(255) not null, + new_prefix varchar(255) not null, + new_id varchar(255) not null +); + +with ids_orig(old_id, prefix) as + (select file_id, concat(substring(file_id, 0, position('/' in 
file_id)), '/') + from filemeta fm + ) +insert into "temp_file_ids" +select fm.old_id, tp.old_prefix, tp.new_prefix, replace(fm.old_id, tp.old_prefix, tp.new_prefix) as new_id +from ids_orig fm +inner join "temp_prefixes" tp on fm.prefix = tp.old_prefix; + +-- remove orphaned files and chunks +delete from filemeta +where "file_id" not in (select "old_id" from "temp_file_ids"); + +delete from filechunk +where "file_id" not in (select "old_id" from "temp_file_ids"); + + +-- update all references +update "filemeta" fm set "file_id" = + (select t.new_id from "temp_file_ids" t where t."old_id" = fm."file_id"); + +update "addon_archive" aa set "file_id" = + (select t.new_id from "temp_file_ids" t where t."old_id" = aa."file_id"); + +update "attachment_archive" aa set "file_id" = + (select t.new_id from "temp_file_ids" t where t."old_id" = aa."file_id"); + +update "attachment" a set "filemetaid" = + (select t.new_id from "temp_file_ids" t where t."old_id" = a."filemetaid"); + +update "attachment_source" a set "file_id" = + (select t.new_id from "temp_file_ids" t where t."old_id" = a."file_id"); + +update "classifier_model" cm set "file_id" = + (select t.new_id from "temp_file_ids" t where t."old_id" = cm."file_id"); + +update "download_query" dq set "file_id" = + (select t.new_id from "temp_file_ids" t where t."old_id" = dq."file_id"); + +update "attachment_preview" ap set "file_id" = + (select t.new_id from "temp_file_ids" t where t."old_id" = ap."file_id"); + +-- update filechunks +update "filechunk" fc set "file_id" = + (select t.new_id from "temp_file_ids" t where t."old_id" = fc."file_id"); + +-- re-create the constraints +alter table "addon_archive" add constraint "addon_archive_file_id_fkey" +foreign key ("file_id") references "filemeta"("file_id"); + +alter table "attachment_archive" add constraint "attachment_archive_file_id_fkey" +foreign key ("file_id") references "filemeta"("file_id"); + +alter table "attachment" add constraint "attachment_filemetaid_fkey" 
+foreign key ("filemetaid") references "filemeta"("file_id"); + +alter table "attachment_source" add constraint "attachment_source_file_id_fkey" +foreign key ("file_id") references "filemeta"("file_id"); + +alter table "classifier_model" add constraint "classifier_model_file_id_fkey" +foreign key ("file_id") references "filemeta"("file_id"); + +alter table "download_query" add constraint "download_query_file_id_fkey" +foreign key ("file_id") references "filemeta"("file_id"); + +alter table "attachment_preview" add constraint "attachment_preview_file_id_fkey" +foreign key ("file_id") references "filemeta"("file_id"); + +-- drop temporary tables +drop table "temp_file_ids"; +drop table "temp_prefixes"; From ae4c49027fcbcf3f6feaf608a42062bc027bec0d Mon Sep 17 00:00:00 2001 From: eikek Date: Fri, 5 Aug 2022 14:27:34 +0200 Subject: [PATCH 08/15] Refactor scala base db migrations. Add marker trait for task args --- .../backend/task/DownloadZipArgs.scala | 1 + .../docspell/common/AllPreviewsArgs.scala | 2 +- .../docspell/common/ConvertAllPdfArgs.scala | 2 +- .../docspell/common/EmptyTrashArgs.scala | 2 +- .../docspell/common/FileCopyTaskArgs.scala | 1 + .../common/FileIntegrityCheckArgs.scala | 2 +- .../docspell/common/ItemAddonTaskArgs.scala | 2 +- .../docspell/common/LearnClassifierArgs.scala | 3 +- .../docspell/common/MakePageCountArgs.scala | 2 +- .../docspell/common/MakePreviewArgs.scala | 2 +- .../docspell/common/ProcessItemArgs.scala | 2 +- .../docspell/common/ReIndexTaskArgs.scala | 2 +- .../docspell/common/ReProcessItemArgs.scala | 1 + .../docspell/common/ScanMailboxArgs.scala | 2 +- .../common/ScheduledAddonTaskArgs.scala | 1 + .../scala/docspell/common/TaskArguments.scala | 8 ++ .../api/PeriodicDueItemsArgs.scala | 2 +- .../notification/api/PeriodicQueryArgs.scala | 2 +- .../db/migration/common/JsonCodecs.scala | 14 +++ .../MigrateDueItemTasks.scala} | 105 +----------------- .../migration/common/MigrateNotifyTasks.scala | 102 +++++++++++++++++ 
.../migration/common/TransactorSupport.scala | 17 +++ .../h2/V1_29_2__MigrateNotifyTask.scala | 12 +- .../h2/V1_32_2__MigrateChannels.scala | 12 +- .../mariadb/V1_29_2__MigrateNotifyTask.scala | 12 +- .../mariadb/V1_32_2__MigrateChannels.scala | 13 +-- .../V1_29_2__MigrateNotifyTask.scala | 12 +- .../postgresql/V1_32_2__MigrateChannels.scala | 12 +- 28 files changed, 188 insertions(+), 162 deletions(-) create mode 100644 modules/common/src/main/scala/docspell/common/TaskArguments.scala create mode 100644 modules/store/src/main/scala/db/migration/common/JsonCodecs.scala rename modules/store/src/main/scala/db/migration/{MigrationTasks.scala => common/MigrateDueItemTasks.scala} (55%) create mode 100644 modules/store/src/main/scala/db/migration/common/MigrateNotifyTasks.scala create mode 100644 modules/store/src/main/scala/db/migration/common/TransactorSupport.scala diff --git a/modules/backend/src/main/scala/docspell/backend/task/DownloadZipArgs.scala b/modules/backend/src/main/scala/docspell/backend/task/DownloadZipArgs.scala index 6e689d57..29e33863 100644 --- a/modules/backend/src/main/scala/docspell/backend/task/DownloadZipArgs.scala +++ b/modules/backend/src/main/scala/docspell/backend/task/DownloadZipArgs.scala @@ -12,6 +12,7 @@ import io.circe.generic.semiauto.{deriveDecoder, deriveEncoder} import io.circe.{Decoder, Encoder} final case class DownloadZipArgs(account: AccountInfo, req: DownloadRequest) + extends TaskArguments object DownloadZipArgs { val taskName: Ident = Ident.unsafe("download-query-zip") diff --git a/modules/common/src/main/scala/docspell/common/AllPreviewsArgs.scala b/modules/common/src/main/scala/docspell/common/AllPreviewsArgs.scala index 2256346f..36210918 100644 --- a/modules/common/src/main/scala/docspell/common/AllPreviewsArgs.scala +++ b/modules/common/src/main/scala/docspell/common/AllPreviewsArgs.scala @@ -18,7 +18,7 @@ import io.circe.{Decoder, Encoder} case class AllPreviewsArgs( collective: Option[CollectiveId], storeMode: 
MakePreviewArgs.StoreMode -) +) extends TaskArguments object AllPreviewsArgs { diff --git a/modules/common/src/main/scala/docspell/common/ConvertAllPdfArgs.scala b/modules/common/src/main/scala/docspell/common/ConvertAllPdfArgs.scala index 644fd412..5f1463a5 100644 --- a/modules/common/src/main/scala/docspell/common/ConvertAllPdfArgs.scala +++ b/modules/common/src/main/scala/docspell/common/ConvertAllPdfArgs.scala @@ -16,7 +16,7 @@ import io.circe.generic.semiauto._ * submitted by this task run in the realm of the collective (and only their files are * considered). If it is empty, it is a system task and all files are considered. */ -case class ConvertAllPdfArgs(collective: Option[CollectiveId]) +case class ConvertAllPdfArgs(collective: Option[CollectiveId]) extends TaskArguments object ConvertAllPdfArgs { diff --git a/modules/common/src/main/scala/docspell/common/EmptyTrashArgs.scala b/modules/common/src/main/scala/docspell/common/EmptyTrashArgs.scala index 0fb47266..4566db0f 100644 --- a/modules/common/src/main/scala/docspell/common/EmptyTrashArgs.scala +++ b/modules/common/src/main/scala/docspell/common/EmptyTrashArgs.scala @@ -20,7 +20,7 @@ import io.circe.generic.semiauto._ case class EmptyTrashArgs( collective: CollectiveId, minAge: Duration -) { +) extends TaskArguments { def makeSubject: String = s"Empty Trash: Remove older than ${minAge.toJava}" diff --git a/modules/common/src/main/scala/docspell/common/FileCopyTaskArgs.scala b/modules/common/src/main/scala/docspell/common/FileCopyTaskArgs.scala index 5026a775..6d04faa1 100644 --- a/modules/common/src/main/scala/docspell/common/FileCopyTaskArgs.scala +++ b/modules/common/src/main/scala/docspell/common/FileCopyTaskArgs.scala @@ -22,6 +22,7 @@ import io.circe.{Decoder, Encoder} * selecting "all", it means all enabled stores. 
*/ final case class FileCopyTaskArgs(from: Option[Ident], to: Selection) + extends TaskArguments object FileCopyTaskArgs { val taskName = Ident.unsafe("copy-file-repositories") diff --git a/modules/common/src/main/scala/docspell/common/FileIntegrityCheckArgs.scala b/modules/common/src/main/scala/docspell/common/FileIntegrityCheckArgs.scala index 671596f3..55c51536 100644 --- a/modules/common/src/main/scala/docspell/common/FileIntegrityCheckArgs.scala +++ b/modules/common/src/main/scala/docspell/common/FileIntegrityCheckArgs.scala @@ -9,7 +9,7 @@ package docspell.common import io.circe.generic.semiauto.{deriveDecoder, deriveEncoder} import io.circe.{Decoder, Encoder} -final case class FileIntegrityCheckArgs(pattern: FileKeyPart) {} +final case class FileIntegrityCheckArgs(pattern: FileKeyPart) extends TaskArguments object FileIntegrityCheckArgs { val taskName: Ident = Ident.unsafe("all-file-integrity-check") diff --git a/modules/common/src/main/scala/docspell/common/ItemAddonTaskArgs.scala b/modules/common/src/main/scala/docspell/common/ItemAddonTaskArgs.scala index 128a7a9b..fe999637 100644 --- a/modules/common/src/main/scala/docspell/common/ItemAddonTaskArgs.scala +++ b/modules/common/src/main/scala/docspell/common/ItemAddonTaskArgs.scala @@ -18,7 +18,7 @@ final case class ItemAddonTaskArgs( collective: CollectiveId, itemId: Ident, addonRunConfigs: Set[Ident] -) +) extends TaskArguments object ItemAddonTaskArgs { val taskName: Ident = Ident.unsafe("addon-existing-item") diff --git a/modules/common/src/main/scala/docspell/common/LearnClassifierArgs.scala b/modules/common/src/main/scala/docspell/common/LearnClassifierArgs.scala index 7e36f32d..6aa7ff92 100644 --- a/modules/common/src/main/scala/docspell/common/LearnClassifierArgs.scala +++ b/modules/common/src/main/scala/docspell/common/LearnClassifierArgs.scala @@ -19,11 +19,10 @@ import io.circe.generic.semiauto._ */ case class LearnClassifierArgs( collectiveId: CollectiveId -) { +) extends TaskArguments { def 
makeSubject: String = "Learn tags" - } object LearnClassifierArgs { diff --git a/modules/common/src/main/scala/docspell/common/MakePageCountArgs.scala b/modules/common/src/main/scala/docspell/common/MakePageCountArgs.scala index 41c4e72e..3e0933d3 100644 --- a/modules/common/src/main/scala/docspell/common/MakePageCountArgs.scala +++ b/modules/common/src/main/scala/docspell/common/MakePageCountArgs.scala @@ -14,7 +14,7 @@ import io.circe.{Decoder, Encoder} */ case class MakePageCountArgs( attachment: Ident -) +) extends TaskArguments object MakePageCountArgs { diff --git a/modules/common/src/main/scala/docspell/common/MakePreviewArgs.scala b/modules/common/src/main/scala/docspell/common/MakePreviewArgs.scala index 4d3b2ff9..e83e2f01 100644 --- a/modules/common/src/main/scala/docspell/common/MakePreviewArgs.scala +++ b/modules/common/src/main/scala/docspell/common/MakePreviewArgs.scala @@ -16,7 +16,7 @@ import io.circe.{Decoder, Encoder} case class MakePreviewArgs( attachment: Ident, store: MakePreviewArgs.StoreMode -) +) extends TaskArguments object MakePreviewArgs { diff --git a/modules/common/src/main/scala/docspell/common/ProcessItemArgs.scala b/modules/common/src/main/scala/docspell/common/ProcessItemArgs.scala index 5c830158..edbee9a7 100644 --- a/modules/common/src/main/scala/docspell/common/ProcessItemArgs.scala +++ b/modules/common/src/main/scala/docspell/common/ProcessItemArgs.scala @@ -22,7 +22,7 @@ import io.circe.generic.semiauto._ * * It is also re-used by the 'ReProcessItem' task. 
*/ -case class ProcessItemArgs(meta: ProcessMeta, files: List[File]) { +case class ProcessItemArgs(meta: ProcessMeta, files: List[File]) extends TaskArguments { def makeSubject: String = files.flatMap(_.name) match { diff --git a/modules/common/src/main/scala/docspell/common/ReIndexTaskArgs.scala b/modules/common/src/main/scala/docspell/common/ReIndexTaskArgs.scala index 8e3326ad..1677cf3f 100644 --- a/modules/common/src/main/scala/docspell/common/ReIndexTaskArgs.scala +++ b/modules/common/src/main/scala/docspell/common/ReIndexTaskArgs.scala @@ -9,7 +9,7 @@ package docspell.common import io.circe._ import io.circe.generic.semiauto._ -final case class ReIndexTaskArgs(collective: Option[CollectiveId]) +final case class ReIndexTaskArgs(collective: Option[CollectiveId]) extends TaskArguments object ReIndexTaskArgs { val taskName = Ident.unsafe("full-text-reindex") diff --git a/modules/common/src/main/scala/docspell/common/ReProcessItemArgs.scala b/modules/common/src/main/scala/docspell/common/ReProcessItemArgs.scala index ff2a0e97..6125f0b3 100644 --- a/modules/common/src/main/scala/docspell/common/ReProcessItemArgs.scala +++ b/modules/common/src/main/scala/docspell/common/ReProcessItemArgs.scala @@ -16,6 +16,7 @@ import io.circe.{Decoder, Encoder} * list is empty, then all attachments are re-processed. 
*/ case class ReProcessItemArgs(itemId: Ident, attachments: List[Ident]) + extends TaskArguments object ReProcessItemArgs { diff --git a/modules/common/src/main/scala/docspell/common/ScanMailboxArgs.scala b/modules/common/src/main/scala/docspell/common/ScanMailboxArgs.scala index 963c3808..d22945c2 100644 --- a/modules/common/src/main/scala/docspell/common/ScanMailboxArgs.scala +++ b/modules/common/src/main/scala/docspell/common/ScanMailboxArgs.scala @@ -49,7 +49,7 @@ case class ScanMailboxArgs( postHandleAll: Option[Boolean], // Exclude the mail body when importing attachmentsOnly: Option[Boolean] -) +) extends TaskArguments object ScanMailboxArgs { diff --git a/modules/common/src/main/scala/docspell/common/ScheduledAddonTaskArgs.scala b/modules/common/src/main/scala/docspell/common/ScheduledAddonTaskArgs.scala index 5ce1bca0..5678f495 100644 --- a/modules/common/src/main/scala/docspell/common/ScheduledAddonTaskArgs.scala +++ b/modules/common/src/main/scala/docspell/common/ScheduledAddonTaskArgs.scala @@ -10,6 +10,7 @@ import io.circe.generic.semiauto.{deriveDecoder, deriveEncoder} import io.circe.{Decoder, Encoder} final case class ScheduledAddonTaskArgs(collective: CollectiveId, addonTaskId: Ident) + extends TaskArguments object ScheduledAddonTaskArgs { val taskName: Ident = Ident.unsafe("addon-scheduled-task") diff --git a/modules/common/src/main/scala/docspell/common/TaskArguments.scala b/modules/common/src/main/scala/docspell/common/TaskArguments.scala new file mode 100644 index 00000000..86117b0c --- /dev/null +++ b/modules/common/src/main/scala/docspell/common/TaskArguments.scala @@ -0,0 +1,8 @@ +package docspell.common + +/** A marker trait for task arguments. + * + * Arguments for tasks are stored as a JSON blob in the database. Changes in structure + * requires a corresponding database migration. 
+ */ +trait TaskArguments diff --git a/modules/notification/api/src/main/scala/docspell/notification/api/PeriodicDueItemsArgs.scala b/modules/notification/api/src/main/scala/docspell/notification/api/PeriodicDueItemsArgs.scala index e71253b3..d2c7163f 100644 --- a/modules/notification/api/src/main/scala/docspell/notification/api/PeriodicDueItemsArgs.scala +++ b/modules/notification/api/src/main/scala/docspell/notification/api/PeriodicDueItemsArgs.scala @@ -28,7 +28,7 @@ final case class PeriodicDueItemsArgs( tagsInclude: List[Ident], tagsExclude: List[Ident], baseUrl: Option[LenientUri] -) +) extends TaskArguments object PeriodicDueItemsArgs { val taskName = Ident.unsafe("periodic-due-items-notify2") diff --git a/modules/notification/api/src/main/scala/docspell/notification/api/PeriodicQueryArgs.scala b/modules/notification/api/src/main/scala/docspell/notification/api/PeriodicQueryArgs.scala index 01f4497d..7d651935 100644 --- a/modules/notification/api/src/main/scala/docspell/notification/api/PeriodicQueryArgs.scala +++ b/modules/notification/api/src/main/scala/docspell/notification/api/PeriodicQueryArgs.scala @@ -20,7 +20,7 @@ final case class PeriodicQueryArgs( bookmark: Option[String], baseUrl: Option[LenientUri], contentStart: Option[String] -) +) extends TaskArguments object PeriodicQueryArgs { val taskName = Ident.unsafe("periodic-query-notify2") diff --git a/modules/store/src/main/scala/db/migration/common/JsonCodecs.scala b/modules/store/src/main/scala/db/migration/common/JsonCodecs.scala new file mode 100644 index 00000000..7b1dc61f --- /dev/null +++ b/modules/store/src/main/scala/db/migration/common/JsonCodecs.scala @@ -0,0 +1,14 @@ +package db.migration.common + +import emil.MailAddress +import emil.javamail.syntax._ +import io.circe.{Decoder, Encoder} + +trait JsonCodecs { + + implicit val jsonEncoder: Encoder[MailAddress] = + Encoder.encodeString.contramap(_.asUnicodeString) + implicit val jsonDecoder: Decoder[MailAddress] = + 
Decoder.decodeString.emap(MailAddress.parse) + +} diff --git a/modules/store/src/main/scala/db/migration/MigrationTasks.scala b/modules/store/src/main/scala/db/migration/common/MigrateDueItemTasks.scala similarity index 55% rename from modules/store/src/main/scala/db/migration/MigrationTasks.scala rename to modules/store/src/main/scala/db/migration/common/MigrateDueItemTasks.scala index 7a803e3c..6f246718 100644 --- a/modules/store/src/main/scala/db/migration/MigrationTasks.scala +++ b/modules/store/src/main/scala/db/migration/common/MigrateDueItemTasks.scala @@ -1,21 +1,13 @@ -/* - * Copyright 2020 Eike K. & Contributors - * - * SPDX-License-Identifier: AGPL-3.0-or-later - */ - -package db.migration +package db.migration.common import cats.data.{NonEmptyList, OptionT} import cats.effect.{IO, Sync} import cats.implicits._ - import docspell.common._ import docspell.common.syntax.StringSyntax._ import docspell.notification.api._ import docspell.store.queries.QLogin import docspell.store.records._ - import db.migration.data.{ PeriodicDueItemsArgs => PeriodicDueItemsArgsLegacy, PeriodicQueryArgs => PeriodicQueryArgsLegacy, @@ -23,31 +15,15 @@ import db.migration.data.{ } import doobie._ import doobie.implicits._ -import doobie.util.transactor.Strategy -import emil.MailAddress -import emil.javamail.syntax._ import io.circe.syntax._ -import io.circe.{Decoder, Encoder} import org.flywaydb.core.api.migration.Context -trait MigrationTasks { +object MigrateDueItemTasks extends TransactorSupport with JsonCodecs { - def logger: org.log4s.Logger + val logger = docspell.logging.getLogger[IO] - implicit val jsonEncoder: Encoder[MailAddress] = - Encoder.encodeString.contramap(_.asUnicodeString) - implicit val jsonDecoder: Decoder[MailAddress] = - Decoder.decodeString.emap(MailAddress.parse) - - def migrateDueItemTasks: ConnectionIO[Unit] = - for { - tasks <- RPeriodicTask.findByTask(NotifyDueItemsArgs.taskName) - _ <- Sync[ConnectionIO].delay( - logger.info(s"Starting to migrate 
${tasks.size} user tasks") - ) - _ <- tasks.traverse(migrateDueItemTask1) - _ <- RPeriodicTask.setEnabledByTask(NotifyDueItemsArgs.taskName, false) - } yield () + def run(ctx: Context) = + migratePeriodicItemTasks.transact(mkTransactor(ctx)) def migratePeriodicItemTasks: ConnectionIO[Unit] = for { @@ -137,75 +113,4 @@ trait MigrationTasks { ref = r.asRef } yield ref) .getOrElseF(Sync[ConnectionIO].raiseError(new Exception("User not found!"))) - - private def migrateDueItemTask1(old: RPeriodicTask): ConnectionIO[Int] = { - val converted = old.args - .parseJsonAs[NotifyDueItemsArgs] - .leftMap(_.getMessage()) - .map(convertArgs) - - converted match { - case Right(args) => - val task = args - .semiflatMap(a => - RPeriodicTask - .updateTask( - old.id, - PeriodicDueItemsArgsLegacy.taskName, - a.asJson.noSpaces - ) - ) - .getOrElse(0) - - Sync[ConnectionIO].delay(logger.info(s"Converting user task: $old")) *> task - - case Left(err) => - logger.error(s"Error converting user task: $old. $err") - 0.pure[ConnectionIO] - } - } - - private def convertArgs( - old: NotifyDueItemsArgs - ): OptionT[ConnectionIO, PeriodicDueItemsArgsLegacy] = { - val recs = old.recipients - .map(MailAddress.parse) - .flatMap { - case Right(m) => Some(m) - case Left(err) => - logger.warn(s"Cannot read mail address: $err. 
Skip this while migrating.") - None - } - - for { - userData <- OptionT(QLogin.findAccount(old.account)) - userId = userData.userId - id <- OptionT.liftF(Ident.randomId[ConnectionIO]) - now <- OptionT.liftF(Timestamp.current[ConnectionIO]) - chName = Some("migrate notify items") - ch = RNotificationChannelMail( - id, - userId, - chName, - old.smtpConnection, - recs, - now - ) - _ <- OptionT.liftF(RNotificationChannelMail.insert(ch)) - args = PeriodicDueItemsArgsLegacy( - old.account, - NonEmptyList.of(ChannelRef(ch.id, ChannelType.Mail, chName)), - old.remindDays, - old.daysBack, - old.tagsInclude, - old.tagsExclude, - old.itemDetailUrl - ) - } yield args - } - - def mkTransactor(ctx: Context): Transactor[IO] = { - val xa = Transactor.fromConnection[IO](ctx.getConnection) - Transactor.strategy.set(xa, Strategy.void) // transactions are handled by flyway - } } diff --git a/modules/store/src/main/scala/db/migration/common/MigrateNotifyTasks.scala b/modules/store/src/main/scala/db/migration/common/MigrateNotifyTasks.scala new file mode 100644 index 00000000..5230086d --- /dev/null +++ b/modules/store/src/main/scala/db/migration/common/MigrateNotifyTasks.scala @@ -0,0 +1,102 @@ +package db.migration.common + +import cats.data.{NonEmptyList, OptionT} +import cats.effect.{IO, Sync} +import cats.implicits._ + +import docspell.common._ +import docspell.common.syntax.StringSyntax._ +import docspell.notification.api._ +import docspell.store.queries.QLogin +import docspell.store.records._ + +import db.migration.data.{PeriodicDueItemsArgs => PeriodicDueItemsArgsLegacy, _} +import doobie._ +import doobie.implicits._ +import emil.MailAddress +import emil.javamail.syntax._ +import io.circe.syntax._ +import org.flywaydb.core.api.migration.Context + +object MigrateNotifyTasks extends TransactorSupport { + val logger = docspell.logging.getLogger[IO] + + def run(ctx: Context): IO[Unit] = + migrateDueItemTasks.transact(mkTransactor(ctx)) + + def migrateDueItemTasks: ConnectionIO[Unit] 
= + for { + tasks <- RPeriodicTask.findByTask(NotifyDueItemsArgs.taskName) + _ <- Sync[ConnectionIO].delay( + logger.info(s"Starting to migrate ${tasks.size} user tasks") + ) + _ <- tasks.traverse(migrateDueItemTask1) + _ <- RPeriodicTask.setEnabledByTask(NotifyDueItemsArgs.taskName, false) + } yield () + + private def migrateDueItemTask1(old: RPeriodicTask): ConnectionIO[Int] = { + val converted = old.args + .parseJsonAs[NotifyDueItemsArgs] + .leftMap(_.getMessage()) + .map(convertArgs) + + converted match { + case Right(args) => + val task = args + .semiflatMap(a => + RPeriodicTask + .updateTask( + old.id, + PeriodicDueItemsArgsLegacy.taskName, + a.asJson.noSpaces + ) + ) + .getOrElse(0) + + Sync[ConnectionIO].delay(logger.info(s"Converting user task: $old")) *> task + + case Left(err) => + logger.error(s"Error converting user task: $old. $err") + 0.pure[ConnectionIO] + } + } + + private def convertArgs( + old: NotifyDueItemsArgs + ): OptionT[ConnectionIO, PeriodicDueItemsArgsLegacy] = { + val recs = old.recipients + .map(MailAddress.parse) + .flatMap { + case Right(m) => Some(m) + case Left(err) => + logger.warn(s"Cannot read mail address: $err. 
Skip this while migrating.") + None + } + + for { + userData <- OptionT(QLogin.findAccount(old.account)) + userId = userData.userId + id <- OptionT.liftF(Ident.randomId[ConnectionIO]) + now <- OptionT.liftF(Timestamp.current[ConnectionIO]) + chName = Some("migrate notify items") + ch = RNotificationChannelMail( + id, + userId, + chName, + old.smtpConnection, + recs, + now + ) + _ <- OptionT.liftF(RNotificationChannelMail.insert(ch)) + args = PeriodicDueItemsArgsLegacy( + old.account, + NonEmptyList.of(ChannelRef(ch.id, ChannelType.Mail, chName)), + old.remindDays, + old.daysBack, + old.tagsInclude, + old.tagsExclude, + old.itemDetailUrl + ) + } yield args + } +} diff --git a/modules/store/src/main/scala/db/migration/common/TransactorSupport.scala b/modules/store/src/main/scala/db/migration/common/TransactorSupport.scala new file mode 100644 index 00000000..dd952ebb --- /dev/null +++ b/modules/store/src/main/scala/db/migration/common/TransactorSupport.scala @@ -0,0 +1,17 @@ +package db.migration.common + +import cats.effect.IO +import docspell.logging.Logger +import doobie.util.transactor.{Strategy, Transactor} +import org.flywaydb.core.api.migration.Context + +trait TransactorSupport { + + def logger: Logger[IO] + + def mkTransactor(ctx: Context): Transactor[IO] = { + val xa = Transactor.fromConnection[IO](ctx.getConnection) + logger.asUnsafe.info(s"Creating transactor for db migrations from connection: $xa") + Transactor.strategy.set(xa, Strategy.void) // transactions are handled by flyway + } +} diff --git a/modules/store/src/main/scala/db/migration/h2/V1_29_2__MigrateNotifyTask.scala b/modules/store/src/main/scala/db/migration/h2/V1_29_2__MigrateNotifyTask.scala index ef85e970..386a4d4d 100644 --- a/modules/store/src/main/scala/db/migration/h2/V1_29_2__MigrateNotifyTask.scala +++ b/modules/store/src/main/scala/db/migration/h2/V1_29_2__MigrateNotifyTask.scala @@ -7,17 +7,13 @@ package db.migration.h2 import cats.effect.unsafe.implicits._ - -import 
db.migration.MigrationTasks -import doobie.implicits._ +import db.migration.common.MigrateNotifyTasks import org.flywaydb.core.api.migration.BaseJavaMigration import org.flywaydb.core.api.migration.Context -class V1_29_2__MigrateNotifyTask extends BaseJavaMigration with MigrationTasks { +class V1_29_2__MigrateNotifyTask extends BaseJavaMigration { val logger = org.log4s.getLogger - override def migrate(ctx: Context): Unit = { - val xa = mkTransactor(ctx) - migrateDueItemTasks.transact(xa).unsafeRunSync() - } + override def migrate(ctx: Context): Unit = + MigrateNotifyTasks.run(ctx).unsafeRunSync() } diff --git a/modules/store/src/main/scala/db/migration/h2/V1_32_2__MigrateChannels.scala b/modules/store/src/main/scala/db/migration/h2/V1_32_2__MigrateChannels.scala index 83983d4a..7d470987 100644 --- a/modules/store/src/main/scala/db/migration/h2/V1_32_2__MigrateChannels.scala +++ b/modules/store/src/main/scala/db/migration/h2/V1_32_2__MigrateChannels.scala @@ -7,16 +7,12 @@ package db.migration.h2 import cats.effect.unsafe.implicits._ - -import db.migration.MigrationTasks -import doobie.implicits._ +import db.migration.common.MigrateDueItemTasks import org.flywaydb.core.api.migration.{BaseJavaMigration, Context} -class V1_32_2__MigrateChannels extends BaseJavaMigration with MigrationTasks { +class V1_32_2__MigrateChannels extends BaseJavaMigration { val logger = org.log4s.getLogger - override def migrate(ctx: Context): Unit = { - val xa = mkTransactor(ctx) - migratePeriodicItemTasks.transact(xa).unsafeRunSync() - } + override def migrate(ctx: Context): Unit = + MigrateDueItemTasks.run(ctx).unsafeRunSync() } diff --git a/modules/store/src/main/scala/db/migration/mariadb/V1_29_2__MigrateNotifyTask.scala b/modules/store/src/main/scala/db/migration/mariadb/V1_29_2__MigrateNotifyTask.scala index 4908ff15..bcdaa170 100644 --- a/modules/store/src/main/scala/db/migration/mariadb/V1_29_2__MigrateNotifyTask.scala +++ 
b/modules/store/src/main/scala/db/migration/mariadb/V1_29_2__MigrateNotifyTask.scala @@ -7,17 +7,13 @@ package db.migration.mariadb import cats.effect.unsafe.implicits._ - -import db.migration.MigrationTasks -import doobie.implicits._ +import db.migration.common.MigrateNotifyTasks import org.flywaydb.core.api.migration.BaseJavaMigration import org.flywaydb.core.api.migration.Context -class V1_29_2__MigrateNotifyTask extends BaseJavaMigration with MigrationTasks { +class V1_29_2__MigrateNotifyTask extends BaseJavaMigration { val logger = org.log4s.getLogger - override def migrate(ctx: Context): Unit = { - val xa = mkTransactor(ctx) - migrateDueItemTasks.transact(xa).unsafeRunSync() - } + override def migrate(ctx: Context): Unit = + MigrateNotifyTasks.run(ctx).unsafeRunSync() } diff --git a/modules/store/src/main/scala/db/migration/mariadb/V1_32_2__MigrateChannels.scala b/modules/store/src/main/scala/db/migration/mariadb/V1_32_2__MigrateChannels.scala index ee524572..0efa9dd1 100644 --- a/modules/store/src/main/scala/db/migration/mariadb/V1_32_2__MigrateChannels.scala +++ b/modules/store/src/main/scala/db/migration/mariadb/V1_32_2__MigrateChannels.scala @@ -7,16 +7,13 @@ package db.migration.mariadb import cats.effect.unsafe.implicits._ - -import db.migration.MigrationTasks -import doobie.implicits._ +import db.migration.common.MigrateDueItemTasks import org.flywaydb.core.api.migration.{BaseJavaMigration, Context} -class V1_32_2__MigrateChannels extends BaseJavaMigration with MigrationTasks { +class V1_32_2__MigrateChannels extends BaseJavaMigration { val logger = org.log4s.getLogger - override def migrate(ctx: Context): Unit = { - val xa = mkTransactor(ctx) - migratePeriodicItemTasks.transact(xa).unsafeRunSync() - } + override def migrate(ctx: Context): Unit = + MigrateDueItemTasks.run(ctx).unsafeRunSync() + } diff --git a/modules/store/src/main/scala/db/migration/postgresql/V1_29_2__MigrateNotifyTask.scala 
b/modules/store/src/main/scala/db/migration/postgresql/V1_29_2__MigrateNotifyTask.scala index 119a71e4..8bb2e132 100644 --- a/modules/store/src/main/scala/db/migration/postgresql/V1_29_2__MigrateNotifyTask.scala +++ b/modules/store/src/main/scala/db/migration/postgresql/V1_29_2__MigrateNotifyTask.scala @@ -7,17 +7,13 @@ package db.migration.postgresql import cats.effect.unsafe.implicits._ - -import db.migration.MigrationTasks -import doobie.implicits._ +import db.migration.common.MigrateNotifyTasks import org.flywaydb.core.api.migration.BaseJavaMigration import org.flywaydb.core.api.migration.Context -class V1_29_2__MigrateNotifyTask extends BaseJavaMigration with MigrationTasks { +class V1_29_2__MigrateNotifyTask extends BaseJavaMigration { val logger = org.log4s.getLogger - override def migrate(ctx: Context): Unit = { - val xa = mkTransactor(ctx) - migrateDueItemTasks.transact(xa).unsafeRunSync() - } + override def migrate(ctx: Context): Unit = + MigrateNotifyTasks.run(ctx).unsafeRunSync() } diff --git a/modules/store/src/main/scala/db/migration/postgresql/V1_32_2__MigrateChannels.scala b/modules/store/src/main/scala/db/migration/postgresql/V1_32_2__MigrateChannels.scala index e887f510..62703333 100644 --- a/modules/store/src/main/scala/db/migration/postgresql/V1_32_2__MigrateChannels.scala +++ b/modules/store/src/main/scala/db/migration/postgresql/V1_32_2__MigrateChannels.scala @@ -7,16 +7,12 @@ package db.migration.postgresql import cats.effect.unsafe.implicits._ - -import db.migration.MigrationTasks -import doobie.implicits._ +import db.migration.common.MigrateDueItemTasks import org.flywaydb.core.api.migration.{BaseJavaMigration, Context} -class V1_32_2__MigrateChannels extends BaseJavaMigration with MigrationTasks { +class V1_32_2__MigrateChannels extends BaseJavaMigration { val logger = org.log4s.getLogger - override def migrate(ctx: Context): Unit = { - val xa = mkTransactor(ctx) - migratePeriodicItemTasks.transact(xa).unsafeRunSync() - } + override def 
migrate(ctx: Context): Unit = + MigrateDueItemTasks.run(ctx).unsafeRunSync() } From 7424a9b2f9a0a97c100ee35918965617b526a1b5 Mon Sep 17 00:00:00 2001 From: eikek Date: Sun, 7 Aug 2022 13:13:22 +0200 Subject: [PATCH 09/15] Migrate background task to new collective id --- .../docspell/common/FileCopyTaskArgs.scala | 2 +- .../scala/docspell/common/FileKeyPart.scala | 7 +- .../docspell/common/syntax/StringSyntax.scala | 4 +- .../common/MigrateCollectiveIdTaskArgs.scala | 457 ++++++++++++++++++ .../common/MigrateDueItemTasks.scala | 10 +- .../migration/common/MigrateNotifyTasks.scala | 35 +- .../data/FileIntegrityCheckArgs.scala | 84 ++++ .../db/migration/data/ProcessItemArgs.scala | 4 +- .../data/ScheduledAddonTaskArgs.scala | 3 - .../migration/h2/V1_39_2__MigrateTasks.scala | 18 + .../mariadb/V1_39_2__MigrateTasks.scala | 18 + .../postgresql/V1_39_2__MigrateTasks.scala | 18 + .../docspell/store/file/BinnyUtils.scala | 4 +- .../main/scala/docspell/store/qb/DML.scala | 2 +- .../scala/docspell/store/records/RJob.scala | 24 + .../store/records/RPeriodicTask.scala | 9 +- 16 files changed, 671 insertions(+), 28 deletions(-) create mode 100644 modules/store/src/main/scala/db/migration/common/MigrateCollectiveIdTaskArgs.scala create mode 100644 modules/store/src/main/scala/db/migration/data/FileIntegrityCheckArgs.scala create mode 100644 modules/store/src/main/scala/db/migration/h2/V1_39_2__MigrateTasks.scala create mode 100644 modules/store/src/main/scala/db/migration/mariadb/V1_39_2__MigrateTasks.scala create mode 100644 modules/store/src/main/scala/db/migration/postgresql/V1_39_2__MigrateTasks.scala diff --git a/modules/common/src/main/scala/docspell/common/FileCopyTaskArgs.scala b/modules/common/src/main/scala/docspell/common/FileCopyTaskArgs.scala index 6d04faa1..843e57cc 100644 --- a/modules/common/src/main/scala/docspell/common/FileCopyTaskArgs.scala +++ b/modules/common/src/main/scala/docspell/common/FileCopyTaskArgs.scala @@ -14,7 +14,7 @@ import 
io.circe.generic.semiauto.{deriveDecoder, deriveEncoder} import io.circe.syntax._ import io.circe.{Decoder, Encoder} -/** This is the input to the `FileCopyTask`. The task copies all files from on +/** This is the input to the `FileCopyTask`. The task copies all files from one * FileRepository to one ore more target repositories. * * If no `from` is given, the default file repository is used. For targets, a list of ids diff --git a/modules/common/src/main/scala/docspell/common/FileKeyPart.scala b/modules/common/src/main/scala/docspell/common/FileKeyPart.scala index fab01ab0..8693f07f 100644 --- a/modules/common/src/main/scala/docspell/common/FileKeyPart.scala +++ b/modules/common/src/main/scala/docspell/common/FileKeyPart.scala @@ -17,9 +17,10 @@ object FileKeyPart { case object Empty extends FileKeyPart - final case class Collective(collective: Ident) extends FileKeyPart + final case class Collective(collective: CollectiveId) extends FileKeyPart - final case class Category(collective: Ident, category: FileCategory) extends FileKeyPart + final case class Category(collective: CollectiveId, category: FileCategory) + extends FileKeyPart final case class Key(key: FileKey) extends FileKeyPart @@ -37,7 +38,7 @@ object FileKeyPart { implicit val jsonDecoder: Decoder[FileKeyPart] = Decoder.instance { cursor => for { - cid <- cursor.getOrElse[Option[Ident]]("collective")(None) + cid <- cursor.getOrElse[Option[CollectiveId]]("collective")(None) cat <- cursor.getOrElse[Option[FileCategory]]("category")(None) emptyObj = cursor.keys.exists(_.isEmpty) diff --git a/modules/common/src/main/scala/docspell/common/syntax/StringSyntax.scala b/modules/common/src/main/scala/docspell/common/syntax/StringSyntax.scala index 5e0038da..d1e27c4b 100644 --- a/modules/common/src/main/scala/docspell/common/syntax/StringSyntax.scala +++ b/modules/common/src/main/scala/docspell/common/syntax/StringSyntax.scala @@ -15,9 +15,7 @@ trait StringSyntax { Option(s).filter(_.trim.nonEmpty) def 
parseJsonAs[A](implicit d: Decoder[A]): Either[Throwable, A] = - for { - json <- parser.decode[A](s) - } yield json + parser.decode[A](s) } } diff --git a/modules/store/src/main/scala/db/migration/common/MigrateCollectiveIdTaskArgs.scala b/modules/store/src/main/scala/db/migration/common/MigrateCollectiveIdTaskArgs.scala new file mode 100644 index 00000000..d3522c7e --- /dev/null +++ b/modules/store/src/main/scala/db/migration/common/MigrateCollectiveIdTaskArgs.scala @@ -0,0 +1,457 @@ +package db.migration.common + +import cats.syntax.all._ +import cats.effect._ +import docspell.common._ +import docspell.store.records.{RCollective, RJob, RPeriodicTask, RUser} +import doobie._ +import doobie.implicits._ +import db.migration.data.{ + AllPreviewsArgs => AllPreviewArgsLegacy, + ConvertAllPdfArgs => ConvertAllPdfArgsLegacy, + DownloadZipArgs => DownloadZipArgsLegacy, + EmptyTrashArgs => EmptyTrashArgsLegacy, + FileIntegrityCheckArgs => FileIntegrityCheckArgsLegacy, + ItemAddonTaskArgs => ItemAddonTaskArgsLegacy, + LearnClassifierArgs => LearnClassifierArgsLegacy, + PeriodicDueItemsArgs => PeriodicDueItemsArgsLegacy, + PeriodicQueryArgs => PeriodicQueryArgsLegacy, + ProcessItemArgs => ProcessItemArgsLegacy, + ReIndexTaskArgs => ReIndexTaskArgsLegacy, + ScanMailboxArgs => ScanMailboxArgsLegacy, + ScheduledAddonTaskArgs => ScheduledAddonTaskArgsLegacy +} +import docspell.notification.api.{PeriodicDueItemsArgs, PeriodicQueryArgs} +import docspell.store.qb._ +import docspell.store.qb.DSL._ +import io.circe.{Decoder, Encoder, parser} +import org.flywaydb.core.api.migration.Context + +object MigrateCollectiveIdTaskArgs extends TransactorSupport { + + val logger = docspell.logging.getLogger[IO] + val loggerCIO = docspell.logging.getLogger[ConnectionIO] + + def run(ctx: Context): IO[Unit] = { + val tx = mkTransactor(ctx) + migrateAllPreviewsArgs(tx) + } + + def migrateAllPreviewsArgs(xa: Transactor[IO]) = + for { + _ <- logger.info(s"Loading account info mappings") + idMap <- 
collectiveIdMapping.transact(xa) + accMap <- accountInfoMapping.transact(xa) + + _ <- logger.info("Converting job and periodic task arguments") + _ <- convertJob[ScheduledAddonTaskArgsLegacy, ScheduledAddonTaskArgs]( + ScheduledAddonTaskArgsLegacy.taskName, + convertScheduledAddonTaskArgs(idMap) + ).transact(xa) + _ <- convertPeriodicJob[ScheduledAddonTaskArgsLegacy, ScheduledAddonTaskArgs]( + ScheduledAddonTaskArgsLegacy.taskName, + convertScheduledAddonTaskArgs(idMap) + ).transact(xa) + + _ <- convertJob[ReIndexTaskArgsLegacy, ReIndexTaskArgs]( + ReIndexTaskArgsLegacy.taskName, + convertReIndexTaskArgs(idMap) + ).transact(xa) + + _ <- convertJob[ProcessItemArgsLegacy, ProcessItemArgs]( + ProcessItemArgsLegacy.taskName, + convertProcessItemArgs(idMap) + ).transact(xa) + _ <- convertJob[ProcessItemArgsLegacy, ProcessItemArgs]( + ProcessItemArgsLegacy.multiUploadTaskName, + convertProcessItemArgs(idMap) + ).transact(xa) + + _ <- convertJob[LearnClassifierArgsLegacy, LearnClassifierArgs]( + LearnClassifierArgsLegacy.taskName, + convertLearnClassifierArgs(idMap) + ).transact(xa) + _ <- convertPeriodicJob[LearnClassifierArgsLegacy, LearnClassifierArgs]( + LearnClassifierArgsLegacy.taskName, + convertLearnClassifierArgs(idMap) + ).transact(xa) + + _ <- convertJob[ItemAddonTaskArgsLegacy, ItemAddonTaskArgs]( + ItemAddonTaskArgsLegacy.taskName, + convertItemAddonTaskArgs(idMap) + ).transact(xa) + + _ <- convertJob[FileIntegrityCheckArgsLegacy, FileIntegrityCheckArgs]( + FileIntegrityCheckArgsLegacy.taskName, + convertFileIntegrityCheckArgs(idMap) + ).transact(xa) + + _ <- convertJob[EmptyTrashArgsLegacy, EmptyTrashArgs]( + EmptyTrashArgsLegacy.taskName, + convertEmptyTrashArgs(idMap) + ).transact(xa) + _ <- convertPeriodicJob[EmptyTrashArgsLegacy, EmptyTrashArgs]( + EmptyTrashArgsLegacy.taskName, + convertEmptyTrashArgs(idMap) + ).transact(xa) + + _ <- convertJob[AllPreviewArgsLegacy, AllPreviewsArgs]( + AllPreviewArgsLegacy.taskName, + convertAllPreviewsArgs(idMap) + 
).transact(xa) + + _ <- convertJob[ConvertAllPdfArgsLegacy, ConvertAllPdfArgs]( + ConvertAllPdfArgsLegacy.taskName, + convertAllPdfArgs(idMap) + ).transact(xa) + + _ <- convertJob[ScanMailboxArgsLegacy, ScanMailboxArgs]( + ScanMailboxArgsLegacy.taskName, + convertScanMailboxArgs(accMap) + ).transact(xa) + _ <- convertPeriodicJob[ScanMailboxArgsLegacy, ScanMailboxArgs]( + ScanMailboxArgsLegacy.taskName, + convertScanMailboxArgs(accMap) + ).transact(xa) + + _ <- convertJob[PeriodicDueItemsArgsLegacy, PeriodicDueItemsArgs]( + PeriodicDueItemsArgsLegacy.taskName, + convertPeriodicDueItemsArgs(accMap) + ).transact(xa) + _ <- convertPeriodicJob[PeriodicDueItemsArgsLegacy, PeriodicDueItemsArgs]( + PeriodicDueItemsArgsLegacy.taskName, + convertPeriodicDueItemsArgs(accMap) + ).transact(xa) + + _ <- convertJob[PeriodicQueryArgsLegacy, PeriodicQueryArgs]( + PeriodicQueryArgs.taskName, + convertPeriodicQueryArgs(accMap) + ).transact(xa) + _ <- convertPeriodicJob[PeriodicQueryArgsLegacy, PeriodicQueryArgs]( + PeriodicQueryArgs.taskName, + convertPeriodicQueryArgs(accMap) + ).transact(xa) + + // The new DownloadZipArgs are not in scope here. These jobs are deleted, as they are + // done in 99% probably. 
If not a user will just click again on the "download all" + // button + _ <- RJob.deleteByTask(DownloadZipArgsLegacy.taskName).transact(xa) + _ <- logger.info("Done converting task arguments.") + + _ <- logger.info("Updating job submitter info") + _ <- updateJobSubmitter(idMap, accMap).transact(xa) + } yield () + + def updateJobSubmitter( + idMap: Map[Ident, CollectiveId], + accMap: Map[AccountId, AccountInfo] + ) = { + val job = RJob.as("j") + val pt = RPeriodicTask.as("pt") + + val updateUser = + accMap.toList.traverse_ { case (accId, accInfo) => + val up1 = + DML.update( + job, + job.group === accId.collective && job.submitter === accId.user, + DML.set( + job.group.setTo(accInfo.collectiveId.valueAsIdent), + job.submitter.setTo(accInfo.userId) + ) + ) + + val up2 = + DML.update( + pt, + pt.group === accId.collective && pt.submitter === accId.user, + DML.set( + pt.group.setTo(accInfo.collectiveId.valueAsIdent), + pt.submitter.setTo(accInfo.userId) + ) + ) + + val up3 = + DML.update( + job, + job.group === accId.collective && job.submitter === accId.collective, + DML.set( + job.group.setTo(accInfo.collectiveId.valueAsIdent), + job.submitter.setTo(accInfo.collectiveId.valueAsIdent) + ) + ) + + val up4 = + DML.update( + pt, + pt.group === accId.collective && pt.submitter === accId.collective, + DML.set( + pt.group.setTo(accInfo.collectiveId.valueAsIdent), + pt.submitter.setTo(accInfo.collectiveId.valueAsIdent) + ) + ) + + up1 *> up2 *> up3 *> up4 + } + + val updateJobGroup = + idMap.toList.traverse_ { case (cname, cid) => + val up1 = + DML.update(job, job.group === cname, DML.set(job.group.setTo(cid.valueAsIdent))) + val up2 = + DML.update(pt, pt.group === cname, DML.set(pt.group.setTo(cid.valueAsIdent))) + up1 *> up2 + } + + updateUser *> updateJobGroup + } + + def convertJob[S: Decoder, T: Encoder]( + task: Ident, + conv: S => Option[T] + ): ConnectionIO[Int] = + for { + jobs <- RJob.findByTaskName(task) + converted = + jobs.traverse(j => + convertJobArgs(j)(conv) 
+ .map(_.pure[ConnectionIO]) + .recoverWith { + case ex if JobState.isDone(j.state) => + val cause = Option(ex.getCause).getOrElse(ex) + Either.right( + loggerCIO.warn( + s"Removing old job '${j.id.id}', because argument parsing failed: ${cause.getMessage}" + ) *> RJob.delete(j.id).as(Option.empty[RJob]) + ) + } + ) + conv <- Sync[ConnectionIO] + .pure(converted) + .rethrow + .flatMap(_.sequence) + .map(_.flatten) + _ <- conv.traverse_(j => RJob.setArgs(j.id, j.args)) + } yield conv.size + + def convertPeriodicJob[S: Decoder, T: Encoder]( + task: Ident, + conv: S => Option[T] + ): ConnectionIO[Int] = + for { + jobs <- RPeriodicTask.findByTask(task) + converted <- Sync[ConnectionIO] + .pure(jobs.traverse(j => convertPeriodicJobArgs(j)(conv))) + .rethrow + .map(_.flatten) + _ <- converted.traverse_(j => RPeriodicTask.setArgs(j.id, j.args)) + } yield converted.size + + private def convertPeriodicDueItemsArgs(accMap: Map[AccountId, AccountInfo])( + oldArgs: PeriodicDueItemsArgsLegacy + ) = + accMap + .get(oldArgs.account) + .map(acc => + PeriodicDueItemsArgs( + account = acc, + channels = oldArgs.channels, + remindDays = oldArgs.remindDays, + daysBack = oldArgs.daysBack, + tagsInclude = oldArgs.tagsInclude, + tagsExclude = oldArgs.tagsExclude, + baseUrl = oldArgs.baseUrl + ) + ) + + private def convertPeriodicQueryArgs( + accMap: Map[AccountId, AccountInfo] + )(oldArgs: PeriodicQueryArgsLegacy) = + accMap + .get(oldArgs.account) + .map(acc => + PeriodicQueryArgs( + account = acc, + channels = oldArgs.channels, + query = oldArgs.query, + bookmark = oldArgs.bookmark, + baseUrl = oldArgs.baseUrl, + contentStart = oldArgs.contentStart + ) + ) + + private def convertScanMailboxArgs( + accMap: Map[AccountId, AccountInfo] + )(oldArgs: ScanMailboxArgsLegacy) = + accMap + .get(oldArgs.account) + .map(acc => + ScanMailboxArgs( + account = acc, + imapConnection = oldArgs.imapConnection, + scanRecursively = oldArgs.scanRecursively, + folders = oldArgs.folders, + receivedSince = 
oldArgs.receivedSince, + targetFolder = oldArgs.targetFolder, + deleteMail = oldArgs.deleteMail, + direction = oldArgs.direction, + itemFolder = oldArgs.itemFolder, + fileFilter = oldArgs.fileFilter, + tags = oldArgs.tags, + subjectFilter = oldArgs.subjectFilter, + language = oldArgs.language, + postHandleAll = oldArgs.postHandleAll, + attachmentsOnly = oldArgs.attachmentsOnly + ) + ) + + private def convertScheduledAddonTaskArgs(idMap: Map[Ident, CollectiveId])( + oldArgs: ScheduledAddonTaskArgsLegacy + ) = + idMap + .get(oldArgs.collective) + .map(cid => ScheduledAddonTaskArgs(cid, oldArgs.addonTaskId)) + + private def convertReIndexTaskArgs( + idMap: Map[Ident, CollectiveId] + )(oldArgs: ReIndexTaskArgsLegacy) = + oldArgs.collective.flatMap { cname => + idMap + .get(cname) + .map(cid => ReIndexTaskArgs(cid.some)) + } + + private def convertProcessItemArgs(idMap: Map[Ident, CollectiveId])( + oldArgs: ProcessItemArgsLegacy + ) = + idMap + .get(oldArgs.meta.collective) + .map(cid => + ProcessItemArgs( + ProcessItemArgs.ProcessMeta( + collective = cid, + itemId = oldArgs.meta.itemId, + language = oldArgs.meta.language, + direction = oldArgs.meta.direction, + sourceAbbrev = oldArgs.meta.sourceAbbrev, + folderId = oldArgs.meta.folderId, + validFileTypes = oldArgs.meta.validFileTypes, + skipDuplicate = oldArgs.meta.skipDuplicate, + fileFilter = oldArgs.meta.fileFilter, + tags = oldArgs.meta.tags, + reprocess = oldArgs.meta.reprocess, + attachmentsOnly = oldArgs.meta.attachmentsOnly + ), + oldArgs.files.map(f => + ProcessItemArgs + .File(f.name, FileKey(cid, f.fileMetaId.category, f.fileMetaId.id)) + ) + ) + ) + + private def convertLearnClassifierArgs(idMap: Map[Ident, CollectiveId])( + oldArgs: LearnClassifierArgsLegacy + ) = + idMap + .get(oldArgs.collective) + .map(cid => LearnClassifierArgs(cid)) + + private def convertItemAddonTaskArgs(idMap: Map[Ident, CollectiveId])( + oldArgs: ItemAddonTaskArgsLegacy + ) = + idMap + .get(oldArgs.collective) + .map(cid => 
ItemAddonTaskArgs(cid, oldArgs.itemId, oldArgs.addonRunConfigs)) + + private def convertFileIntegrityCheckArgs(idMap: Map[Ident, CollectiveId])( + oldArgs: FileIntegrityCheckArgsLegacy + ) = + oldArgs.pattern match { + case FileIntegrityCheckArgsLegacy.FileKeyPart.Key(key) => + idMap + .get(key.collective) + .map(cid => + FileIntegrityCheckArgs(FileKeyPart.Key(FileKey(cid, key.category, key.id))) + ) + + case FileIntegrityCheckArgsLegacy.FileKeyPart.Collective(cname) => + idMap + .get(cname) + .map(cid => FileIntegrityCheckArgs(FileKeyPart.Collective(cid))) + + case FileIntegrityCheckArgsLegacy.FileKeyPart.Category(cname, category) => + idMap + .get(cname) + .map(cid => FileIntegrityCheckArgs(FileKeyPart.Category(cid, category))) + + case FileIntegrityCheckArgsLegacy.FileKeyPart.Empty => + None + } + + private def convertEmptyTrashArgs(idMap: Map[Ident, CollectiveId])( + oldArgs: EmptyTrashArgsLegacy + ) = + idMap.get(oldArgs.collective).map(cid => EmptyTrashArgs(cid, oldArgs.minAge)) + + private def convertAllPreviewsArgs(idMap: Map[Ident, CollectiveId])( + oldArgs: AllPreviewArgsLegacy + ) = + oldArgs.collective.flatMap { cname => + idMap + .get(cname) + .map(cid => AllPreviewsArgs(cid.some, oldArgs.storeMode)) + } + + private def convertAllPdfArgs(idMap: Map[Ident, CollectiveId])( + oldArgs: ConvertAllPdfArgsLegacy + ) = + oldArgs.collective.flatMap(cname => + idMap.get(cname).map(cid => ConvertAllPdfArgs(cid.some)) + ) + + def convertJobArgs[S: Decoder, T: Encoder]( + job: RJob + )(update: S => Option[T]): Either[Throwable, Option[RJob]] = + for { + oldArgs <- parser + .decode[S](job.args) + .leftMap(err => + new IllegalStateException( + s"Error parsing arguments of job: ${job.id.id}", + err + ) + ) + upJob = update(oldArgs).map(j => job.withArgs(j)) + } yield upJob + + def convertPeriodicJobArgs[S: Decoder, T: Encoder]( + job: RPeriodicTask + )(update: S => Option[T]): Either[Throwable, Option[RPeriodicTask]] = + for { + oldArgs <- parser + 
.decode[S](job.args) + .leftMap(err => + new IllegalStateException( + s"Error parsing arguments of periodic task: ${job.id.id}", + err + ) + ) + upJob = update(oldArgs).map(j => job.withArgs(j)) + } yield upJob + + private def collectiveIdMapping: ConnectionIO[Map[Ident, CollectiveId]] = + RCollective.findAll(_.id).map(_.map(coll => coll.name -> coll.id).toMap) + + private def accountInfoMapping: ConnectionIO[Map[AccountId, AccountInfo]] = { + val u = RUser.as("u") + val c = RCollective.as("c") + + Select( + select(c.id, c.name, u.uid, u.login), + from(u).innerJoin(c, c.id === u.cid) + ).build + .query[AccountInfo] + .to[List] + .map(_.map(a => a.asAccountId -> a).toMap) + } +} diff --git a/modules/store/src/main/scala/db/migration/common/MigrateDueItemTasks.scala b/modules/store/src/main/scala/db/migration/common/MigrateDueItemTasks.scala index 6f246718..3a3dfbc4 100644 --- a/modules/store/src/main/scala/db/migration/common/MigrateDueItemTasks.scala +++ b/modules/store/src/main/scala/db/migration/common/MigrateDueItemTasks.scala @@ -6,8 +6,8 @@ import cats.implicits._ import docspell.common._ import docspell.common.syntax.StringSyntax._ import docspell.notification.api._ -import docspell.store.queries.QLogin import docspell.store.records._ +import docspell.store.qb.DSL._ import db.migration.data.{ PeriodicDueItemsArgs => PeriodicDueItemsArgsLegacy, PeriodicQueryArgs => PeriodicQueryArgsLegacy, @@ -103,8 +103,7 @@ object MigrateDueItemTasks extends TransactorSupport with JsonCodecs { private def saveChannel(ch: Channel, account: AccountId): ConnectionIO[ChannelRef] = (for { newId <- OptionT.liftF(Ident.randomId[ConnectionIO]) - userData <- OptionT(QLogin.findAccount(account)) - userId = userData.userId + userId <- OptionT(findIdByAccountId(account)) r <- RNotificationChannel.fromChannel(ch, newId, userId) _ <- OptionT.liftF(RNotificationChannel.insert(r)) _ <- OptionT.liftF( @@ -113,4 +112,9 @@ object MigrateDueItemTasks extends TransactorSupport with JsonCodecs { 
ref = r.asRef } yield ref) .getOrElseF(Sync[ConnectionIO].raiseError(new Exception("User not found!"))) + + def findIdByAccountId(accountId: AccountId): ConnectionIO[Option[Ident]] = + sql"select u.uid from user_ u where u.cid = ${accountId.collective} and u.login = ${accountId.user}" + .query[Ident] + .option } diff --git a/modules/store/src/main/scala/db/migration/common/MigrateNotifyTasks.scala b/modules/store/src/main/scala/db/migration/common/MigrateNotifyTasks.scala index 5230086d..1f9a4d4d 100644 --- a/modules/store/src/main/scala/db/migration/common/MigrateNotifyTasks.scala +++ b/modules/store/src/main/scala/db/migration/common/MigrateNotifyTasks.scala @@ -3,14 +3,13 @@ package db.migration.common import cats.data.{NonEmptyList, OptionT} import cats.effect.{IO, Sync} import cats.implicits._ - import docspell.common._ import docspell.common.syntax.StringSyntax._ import docspell.notification.api._ -import docspell.store.queries.QLogin -import docspell.store.records._ - +import docspell.store.records.{RNotificationChannelMail, RPeriodicTask} +import docspell.store.qb.DSL._ import db.migration.data.{PeriodicDueItemsArgs => PeriodicDueItemsArgsLegacy, _} +import docspell.store.qb.DML import doobie._ import doobie.implicits._ import emil.MailAddress @@ -74,20 +73,20 @@ object MigrateNotifyTasks extends TransactorSupport { } for { - userData <- OptionT(QLogin.findAccount(old.account)) - userId = userData.userId + userId <- OptionT(findIdByAccountId(old.account)) id <- OptionT.liftF(Ident.randomId[ConnectionIO]) now <- OptionT.liftF(Timestamp.current[ConnectionIO]) + connId <- OptionT(findSmtpConnectionId(old.smtpConnection, userId)) chName = Some("migrate notify items") ch = RNotificationChannelMail( id, userId, chName, - old.smtpConnection, + connId, recs, now ) - _ <- OptionT.liftF(RNotificationChannelMail.insert(ch)) + _ <- OptionT.liftF(insert(ch)) args = PeriodicDueItemsArgsLegacy( old.account, NonEmptyList.of(ChannelRef(ch.id, ChannelType.Mail, chName)), @@ 
-99,4 +98,24 @@ object MigrateNotifyTasks extends TransactorSupport { ) } yield args } + + def findIdByAccountId(accountId: AccountId): ConnectionIO[Option[Ident]] = + sql"select u.uid from user_ u where u.cid = ${accountId.collective} and u.login = ${accountId.user}" + .query[Ident] + .option + + def findSmtpConnectionId(name: Ident, userId: Ident): ConnectionIO[Option[Ident]] = + sql"select id from useremail where uid = $userId and (name = $name or id = $name)" + .query[Ident] + .option + + // insert without 'name' column, it was added afterwards + def insert(r: RNotificationChannelMail): ConnectionIO[Int] = { + val t = RNotificationChannelMail.T + DML.insert( + t, + NonEmptyList.of(t.id, t.uid, t.connection, t.recipients, t.created), + sql"${r.id},${r.uid},${r.connection},${r.recipients},${r.created}" + ) + } } diff --git a/modules/store/src/main/scala/db/migration/data/FileIntegrityCheckArgs.scala b/modules/store/src/main/scala/db/migration/data/FileIntegrityCheckArgs.scala new file mode 100644 index 00000000..ec97977c --- /dev/null +++ b/modules/store/src/main/scala/db/migration/data/FileIntegrityCheckArgs.scala @@ -0,0 +1,84 @@ +/* + * Copyright 2020 Eike K. 
& Contributors + * + * SPDX-License-Identifier: AGPL-3.0-or-later + */ + +package db.migration.data + +import cats.implicits._ +import db.migration.data.FileIntegrityCheckArgs.FileKeyPart +import docspell.common.{FileCategory, Ident} +import io.circe.generic.semiauto.{deriveDecoder, deriveEncoder} +import io.circe.{Decoder, Encoder} +import io.circe.syntax._ +import io.circe.DecodingFailure + +/** @deprecated `FileKey` and `FileKeyPart` was replaced to use a `CollectiveId` */ +final case class FileIntegrityCheckArgs(pattern: FileKeyPart) + +object FileIntegrityCheckArgs { + val taskName: Ident = Ident.unsafe("all-file-integrity-check") + + final case class FileKey(collective: Ident, category: FileCategory, id: Ident) { + override def toString = + s"${collective.id}/${category.id.id}/${id.id}" + } + + object FileKey { + + implicit val jsonDecoder: Decoder[FileKey] = + deriveDecoder[FileKey] + + implicit val jsonEncoder: Encoder[FileKey] = + deriveEncoder[FileKey] + } + + sealed trait FileKeyPart {} + + object FileKeyPart { + + case object Empty extends FileKeyPart + + final case class Collective(collective: Ident) extends FileKeyPart + + final case class Category(collective: Ident, category: FileCategory) + extends FileKeyPart + + final case class Key(key: FileKey) extends FileKeyPart + + implicit val jsonEncoder: Encoder[FileKeyPart] = + Encoder.instance { + case Empty => ().asJson + case Collective(cid) => + Map("collective" -> cid.asJson).asJson + case Category(cid, cat) => + Map("collective" -> cid.asJson, "category" -> cat.asJson).asJson + case Key(key) => + key.asJson + } + + implicit val jsonDecoder: Decoder[FileKeyPart] = + Decoder.instance { cursor => + for { + cid <- cursor.getOrElse[Option[Ident]]("collective")(None) + cat <- cursor.getOrElse[Option[FileCategory]]("category")(None) + emptyObj = cursor.keys.exists(_.isEmpty) + + c3 = cursor.as[FileKey].map(Key).toOption + c2 = (cid, cat).mapN(Category) + c1 = cid.map(Collective) + c0 = 
Option.when(emptyObj)(Empty) + + c = c3.orElse(c2).orElse(c1).orElse(c0) + res <- c.toRight(DecodingFailure("", cursor.history)) + } yield res + } + } + + implicit val jsonDecoder: Decoder[FileIntegrityCheckArgs] = + deriveDecoder + + implicit val jsonEncoder: Encoder[FileIntegrityCheckArgs] = + deriveEncoder +} diff --git a/modules/store/src/main/scala/db/migration/data/ProcessItemArgs.scala b/modules/store/src/main/scala/db/migration/data/ProcessItemArgs.scala index 85c6a204..f951fed7 100644 --- a/modules/store/src/main/scala/db/migration/data/ProcessItemArgs.scala +++ b/modules/store/src/main/scala/db/migration/data/ProcessItemArgs.scala @@ -7,7 +7,7 @@ package db.migration.data import ProcessItemArgs._ -import docspell.common._ +import docspell.common.{FileIntegrityCheckArgs => _, _} import docspell.common.syntax.all._ import io.circe._ @@ -68,7 +68,7 @@ object ProcessItemArgs { implicit val jsonDecoder: Decoder[ProcessMeta] = deriveDecoder[ProcessMeta] } - case class File(name: Option[String], fileMetaId: FileKey) + case class File(name: Option[String], fileMetaId: FileIntegrityCheckArgs.FileKey) object File { implicit val jsonEncoder: Encoder[File] = deriveEncoder[File] implicit val jsonDecoder: Decoder[File] = deriveDecoder[File] diff --git a/modules/store/src/main/scala/db/migration/data/ScheduledAddonTaskArgs.scala b/modules/store/src/main/scala/db/migration/data/ScheduledAddonTaskArgs.scala index 5df1fd36..04064ab6 100644 --- a/modules/store/src/main/scala/db/migration/data/ScheduledAddonTaskArgs.scala +++ b/modules/store/src/main/scala/db/migration/data/ScheduledAddonTaskArgs.scala @@ -12,9 +12,6 @@ import io.circe.{Decoder, Encoder} /** @deprecated * This has been replaced with a version using a `CollectiveId` - * - * @param collective - * @param addonTaskId */ final case class ScheduledAddonTaskArgs(collective: Ident, addonTaskId: Ident) diff --git a/modules/store/src/main/scala/db/migration/h2/V1_39_2__MigrateTasks.scala 
b/modules/store/src/main/scala/db/migration/h2/V1_39_2__MigrateTasks.scala new file mode 100644 index 00000000..0d4f09e0 --- /dev/null +++ b/modules/store/src/main/scala/db/migration/h2/V1_39_2__MigrateTasks.scala @@ -0,0 +1,18 @@ +/* + * Copyright 2020 Eike K. & Contributors + * + * SPDX-License-Identifier: AGPL-3.0-or-later + */ + +package db.migration.h2 + +import cats.effect.unsafe.implicits._ +import db.migration.common.MigrateCollectiveIdTaskArgs +import org.flywaydb.core.api.migration.{BaseJavaMigration, Context} + +class V1_39_2__MigrateTasks extends BaseJavaMigration { + val logger = org.log4s.getLogger + + override def migrate(ctx: Context): Unit = + MigrateCollectiveIdTaskArgs.run(ctx).unsafeRunSync() +} diff --git a/modules/store/src/main/scala/db/migration/mariadb/V1_39_2__MigrateTasks.scala b/modules/store/src/main/scala/db/migration/mariadb/V1_39_2__MigrateTasks.scala new file mode 100644 index 00000000..d301dd55 --- /dev/null +++ b/modules/store/src/main/scala/db/migration/mariadb/V1_39_2__MigrateTasks.scala @@ -0,0 +1,18 @@ +/* + * Copyright 2020 Eike K. & Contributors + * + * SPDX-License-Identifier: AGPL-3.0-or-later + */ + +package db.migration.mariadb + +import cats.effect.unsafe.implicits._ +import db.migration.common.MigrateCollectiveIdTaskArgs +import org.flywaydb.core.api.migration.{BaseJavaMigration, Context} + +class V1_39_2__MigrateTasks extends BaseJavaMigration { + val logger = org.log4s.getLogger + + override def migrate(ctx: Context): Unit = + MigrateCollectiveIdTaskArgs.run(ctx).unsafeRunSync() +} diff --git a/modules/store/src/main/scala/db/migration/postgresql/V1_39_2__MigrateTasks.scala b/modules/store/src/main/scala/db/migration/postgresql/V1_39_2__MigrateTasks.scala new file mode 100644 index 00000000..8acd6336 --- /dev/null +++ b/modules/store/src/main/scala/db/migration/postgresql/V1_39_2__MigrateTasks.scala @@ -0,0 +1,18 @@ +/* + * Copyright 2020 Eike K. 
& Contributors + * + * SPDX-License-Identifier: AGPL-3.0-or-later + */ + +package db.migration.postgresql + +import cats.effect.unsafe.implicits._ +import db.migration.common.MigrateCollectiveIdTaskArgs +import org.flywaydb.core.api.migration.{BaseJavaMigration, Context} + +class V1_39_2__MigrateTasks extends BaseJavaMigration { + val logger = org.log4s.getLogger + + override def migrate(ctx: Context): Unit = + MigrateCollectiveIdTaskArgs.run(ctx).unsafeRunSync() +} diff --git a/modules/store/src/main/scala/docspell/store/file/BinnyUtils.scala b/modules/store/src/main/scala/docspell/store/file/BinnyUtils.scala index 773f2f6e..ee89bc88 100644 --- a/modules/store/src/main/scala/docspell/store/file/BinnyUtils.scala +++ b/modules/store/src/main/scala/docspell/store/file/BinnyUtils.scala @@ -29,8 +29,8 @@ object BinnyUtils { def fileKeyPartToPrefix(fkp: FileKeyPart): Option[String] = fkp match { case FileKeyPart.Empty => None - case FileKeyPart.Collective(cid) => Some(s"${cid.id}/%") - case FileKeyPart.Category(cid, cat) => Some(s"${cid.id}/${cat.id.id}/%") + case FileKeyPart.Collective(cid) => Some(s"${cid.value}/%") + case FileKeyPart.Category(cid, cat) => Some(s"${cid.value}/${cat.id.id}/%") case FileKeyPart.Key(key) => Some(fileKeyToBinaryId(key).id) } diff --git a/modules/store/src/main/scala/docspell/store/qb/DML.scala b/modules/store/src/main/scala/docspell/store/qb/DML.scala index 98939a34..e9b3c790 100644 --- a/modules/store/src/main/scala/docspell/store/qb/DML.scala +++ b/modules/store/src/main/scala/docspell/store/qb/DML.scala @@ -76,7 +76,7 @@ object DML extends DoobieMeta { setter: Nel[Setter[_]] ): Fragment = { val condFrag = cond.map(SelectBuilder.cond).getOrElse(Fragment.empty) - fr"UPDATE" ++ FromExprBuilder.buildTable(table) ++ fr"SET" ++ + fr"UPDATE" ++ FromExprBuilder.buildTable(table) ++ fr" SET" ++ setter .map(s => buildSetter(s)) .reduceLeft(_ ++ comma ++ _) ++ diff --git a/modules/store/src/main/scala/docspell/store/records/RJob.scala 
b/modules/store/src/main/scala/docspell/store/records/RJob.scala index 4804232b..f8b00e8e 100644 --- a/modules/store/src/main/scala/docspell/store/records/RJob.scala +++ b/modules/store/src/main/scala/docspell/store/records/RJob.scala @@ -44,6 +44,9 @@ case class RJob( def isInProgress: Boolean = JobState.inProgress.contains(state) + + def withArgs[A: Encoder](args: A): RJob = + copy(args = Encoder[A].apply(args).noSpaces) } object RJob { @@ -151,6 +154,13 @@ object RJob { ) } + def findByTaskName(task: Ident): ConnectionIO[Vector[RJob]] = + Select( + select(T.all), + from(T), + T.task === task + ).build.query[RJob].to[Vector] + def findFromIds(ids: Seq[Ident]): ConnectionIO[Vector[RJob]] = NonEmptyList.fromList(ids.toList) match { case None => @@ -180,6 +190,12 @@ object RJob { ) } + def setJsonArgs[A: Encoder](jobId: Ident, args: A): ConnectionIO[Int] = + DML.update(T, T.id === jobId, DML.set(T.args.setTo(Encoder[A].apply(args).noSpaces))) + + def setArgs(jobId: Ident, args: String): ConnectionIO[Int] = + DML.update(T, T.id === jobId, DML.set(T.args.setTo(args))) + def incrementRetries(jobid: Ident): ConnectionIO[Int] = DML .update( @@ -301,6 +317,14 @@ object RJob { n1 <- DML.delete(T, T.id === jobId) } yield n0 + n1 + def deleteByTask(task: Ident): ConnectionIO[Int] = { + val query = Select(select(T.id), from(T), T.task === task) + for { + n1 <- DML.delete(RJobLog.T, RJobLog.T.jobId.in(query)) + n2 <- DML.delete(T, T.task === task) + } yield n1 + n2 + } + def findIdsDoneAndOlderThan(ts: Timestamp): Stream[ConnectionIO, Ident] = run( select(T.id), diff --git a/modules/store/src/main/scala/docspell/store/records/RPeriodicTask.scala b/modules/store/src/main/scala/docspell/store/records/RPeriodicTask.scala index 813b9a10..b250d9d4 100644 --- a/modules/store/src/main/scala/docspell/store/records/RPeriodicTask.scala +++ b/modules/store/src/main/scala/docspell/store/records/RPeriodicTask.scala @@ -9,14 +9,13 @@ package docspell.store.records import 
cats.data.NonEmptyList import cats.effect._ import cats.implicits._ - import docspell.common._ import docspell.store.qb.DSL._ import docspell.store.qb._ - import com.github.eikek.calev.CalEvent import doobie._ import doobie.implicits._ +import io.circe.Encoder /** A periodic task is a special job description, that shares a few properties of a * `RJob`. It must provide all information to create a `RJob` value eventually. @@ -38,6 +37,9 @@ case class RPeriodicTask( summary: Option[String] ) { + def withArgs[A: Encoder](args: A): RPeriodicTask = + copy(args = Encoder[A].apply(args).noSpaces) + def toJob[F[_]: Sync]: F[RJob] = for { now <- Timestamp.current[F] @@ -112,6 +114,9 @@ object RPeriodicTask { def updateTask(id: Ident, taskName: Ident, args: String): ConnectionIO[Int] = DML.update(T, T.id === id, DML.set(T.task.setTo(taskName), T.args.setTo(args))) + def setArgs(taskId: Ident, args: String): ConnectionIO[Int] = + DML.update(T, T.id === taskId, DML.set(T.args.setTo(args))) + def setEnabledByTask(taskName: Ident, enabled: Boolean): ConnectionIO[Int] = DML.update(T, T.task === taskName, DML.set(T.enabled.setTo(enabled))) From e9e3e607d3e266c2019af4ab40e6d20eaff61561 Mon Sep 17 00:00:00 2001 From: eikek Date: Sun, 7 Aug 2022 15:12:48 +0200 Subject: [PATCH 10/15] Fix collective insights query --- .../src/main/scala/docspell/store/queries/QCollective.scala | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/modules/store/src/main/scala/docspell/store/queries/QCollective.scala b/modules/store/src/main/scala/docspell/store/queries/QCollective.scala index fc2740c3..de37aec6 100644 --- a/modules/store/src/main/scala/docspell/store/queries/QCollective.scala +++ b/modules/store/src/main/scala/docspell/store/queries/QCollective.scala @@ -97,7 +97,7 @@ object QCollective { (select a.attachid as aid, a.filemetaid as fid from attachment a inner join item i on a.itemid = i.itemid - where i.cid = $coll) + where i.coll_id = $coll) select a.fid,m.length from attachs 
a inner join filemeta m on m.file_id = a.fid union distinct From d9485355e9c6adf23344119dcd5e040118664d22 Mon Sep 17 00:00:00 2001 From: eikek Date: Sun, 7 Aug 2022 15:17:52 +0200 Subject: [PATCH 11/15] Fix websocket frame to user mapping --- .../restserver/conv/AddonValidationSupport.scala | 5 ++--- .../restserver/routes/AddonArchiveRoutes.scala | 2 +- .../scala/docspell/restserver/ws/OutputEvent.scala | 12 ++++++------ .../docspell/restserver/ws/WebSocketRoutes.scala | 2 +- 4 files changed, 10 insertions(+), 11 deletions(-) diff --git a/modules/restserver/src/main/scala/docspell/restserver/conv/AddonValidationSupport.scala b/modules/restserver/src/main/scala/docspell/restserver/conv/AddonValidationSupport.scala index dd2351b8..2ae9872f 100644 --- a/modules/restserver/src/main/scala/docspell/restserver/conv/AddonValidationSupport.scala +++ b/modules/restserver/src/main/scala/docspell/restserver/conv/AddonValidationSupport.scala @@ -7,11 +7,10 @@ package docspell.restserver.conv import cats.syntax.all._ - import docspell.addons.AddonMeta import docspell.backend.ops.AddonValidationError import docspell.backend.ops.OAddons.AddonValidationResult -import docspell.common.Ident +import docspell.common.CollectiveId import docspell.restserver.ws.{OutputEvent, OutputEventEncoder} import docspell.store.records.RAddonArchive @@ -51,7 +50,7 @@ trait AddonValidationSupport { } def addonResultOutputEventEncoder( - collective: Ident + collective: CollectiveId ): OutputEventEncoder[AddonValidationResult[(RAddonArchive, AddonMeta)]] = OutputEventEncoder.instance { case Right((archive, _)) => diff --git a/modules/restserver/src/main/scala/docspell/restserver/routes/AddonArchiveRoutes.scala b/modules/restserver/src/main/scala/docspell/restserver/routes/AddonArchiveRoutes.scala index 6995ebb8..637b6763 100644 --- a/modules/restserver/src/main/scala/docspell/restserver/routes/AddonArchiveRoutes.scala +++ 
b/modules/restserver/src/main/scala/docspell/restserver/routes/AddonArchiveRoutes.scala @@ -34,7 +34,7 @@ object AddonArchiveRoutes extends AddonValidationSupport { ): HttpRoutes[F] = { val dsl = new Http4sDsl[F] {} import dsl._ - implicit val wsOutputEnc = addonResultOutputEventEncoder(token.account.collective) + implicit val wsOutputEnc = addonResultOutputEventEncoder(token.account.collectiveId) HttpRoutes.of { case GET -> Root => diff --git a/modules/restserver/src/main/scala/docspell/restserver/ws/OutputEvent.scala b/modules/restserver/src/main/scala/docspell/restserver/ws/OutputEvent.scala index f41dd5de..42fb3e7d 100644 --- a/modules/restserver/src/main/scala/docspell/restserver/ws/OutputEvent.scala +++ b/modules/restserver/src/main/scala/docspell/restserver/ws/OutputEvent.scala @@ -34,7 +34,7 @@ object OutputEvent { final case class JobSubmitted(group: Ident, task: Ident) extends OutputEvent { def forCollective(token: AuthToken): Boolean = - token.account.collective == group + token.account.collectiveId.valueAsIdent == group def asJson: Json = Msg("job-submitted", task).asJson @@ -47,7 +47,7 @@ object OutputEvent { result: Option[Json] ) extends OutputEvent { def forCollective(token: AuthToken): Boolean = - token.account.collective == group + token.account.collectiveId.valueAsIdent == group def asJson: Json = Msg( @@ -56,23 +56,23 @@ object OutputEvent { ).asJson } - final case class JobsWaiting(collective: Ident, count: Int) extends OutputEvent { + final case class JobsWaiting(collective: CollectiveId, count: Int) extends OutputEvent { def forCollective(token: AuthToken): Boolean = - token.account.collective == collective + token.account.collectiveId == collective def asJson: Json = Msg("jobs-waiting", count).asJson } final case class AddonInstalled( - collective: Ident, + collective: CollectiveId, message: String, error: Option[AddonValidationError], addonId: Option[Ident], originalUrl: Option[LenientUri] ) extends OutputEvent { def forCollective(token: 
AuthToken) = - token.account.collective == collective + token.account.collectiveId == collective override def asJson = Msg( diff --git a/modules/restserver/src/main/scala/docspell/restserver/ws/WebSocketRoutes.scala b/modules/restserver/src/main/scala/docspell/restserver/ws/WebSocketRoutes.scala index 42ba54b2..44bb2229 100644 --- a/modules/restserver/src/main/scala/docspell/restserver/ws/WebSocketRoutes.scala +++ b/modules/restserver/src/main/scala/docspell/restserver/ws/WebSocketRoutes.scala @@ -34,7 +34,7 @@ object WebSocketRoutes { val init = for { jc <- backend.job.getUnfinishedJobCount(UserTaskScope(user.account)) - msg = OutputEvent.JobsWaiting(user.account.collective, jc) + msg = OutputEvent.JobsWaiting(user.account.collectiveId, jc) } yield Text(msg.encode) val toClient: Stream[F, WebSocketFrame.Text] = From 868285a26ba910dff300ee82956d2fa2d4701aca Mon Sep 17 00:00:00 2001 From: eikek Date: Sun, 7 Aug 2022 16:19:56 +0200 Subject: [PATCH 12/15] Fix fulltext search queries for new collective-id --- .../scala/docspell/ftsclient/FtsResult.scala | 2 +- .../docspell/ftspsql/FtsRepository.scala | 23 +++++++++++-------- .../docspell/ftspsql/PsqlFtsClient.scala | 3 +++ .../scala/docspell/ftspsql/SearchResult.scala | 2 +- .../scala/docspell/ftssolr/JsonCodec.scala | 2 +- .../docspell/store/fts/TempFtsOpsTest.scala | 4 ++-- 6 files changed, 21 insertions(+), 15 deletions(-) diff --git a/modules/fts-client/src/main/scala/docspell/ftsclient/FtsResult.scala b/modules/fts-client/src/main/scala/docspell/ftsclient/FtsResult.scala index 8af09caf..516711a9 100644 --- a/modules/fts-client/src/main/scala/docspell/ftsclient/FtsResult.scala +++ b/modules/fts-client/src/main/scala/docspell/ftsclient/FtsResult.scala @@ -29,7 +29,7 @@ object FtsResult { case class ItemMatch( id: Ident, itemId: Ident, - collectiveId: Ident, + collectiveId: CollectiveId, score: Double, data: MatchData ) diff --git a/modules/fts-psql/src/main/scala/docspell/ftspsql/FtsRepository.scala 
b/modules/fts-psql/src/main/scala/docspell/ftspsql/FtsRepository.scala index 55d51c19..b8729760 100644 --- a/modules/fts-psql/src/main/scala/docspell/ftspsql/FtsRepository.scala +++ b/modules/fts-psql/src/main/scala/docspell/ftspsql/FtsRepository.scala @@ -16,6 +16,7 @@ import doobie._ import doobie.implicits._ object FtsRepository extends DoobieMeta { + private[this] val logger = docspell.logging.getLogger[ConnectionIO] val table = fr"ftspsql_search" def containsData: ConnectionIO[Boolean] = @@ -62,15 +63,17 @@ object FtsRepository extends DoobieMeta { val query = mkQueryPart(pq, q) - sql"""select $select - |from $table, $query - |where ${mkCondition(q)} AND query @@ text_index - |order by rank desc - |limit ${q.limit} - |offset ${q.offset} - |""".stripMargin - .query[SearchResult] - .to[Vector] + val sqlFrag = + sql"""select $select + |from $table, $query + |where ${mkCondition(q)} AND query @@ text_index + |order by rank desc + |limit ${q.limit} + |offset ${q.offset} + |""".stripMargin + + logger.asUnsafe.trace(s"PSQL Fulltext query: $sqlFrag") + sqlFrag.query[SearchResult].to[Vector] } private def mkCondition(q: FtsQuery): Fragment = { @@ -84,7 +87,7 @@ object FtsRepository extends DoobieMeta { val folders = NonEmptyList.fromList(q.folders.toList).map { nel => val ids = nel.map(id => fr"$id").reduceLeft(_ ++ fr"," ++ _) - fr"folder_id in ($ids)" + fr"(folder_id in ($ids) or folder_id is null)" } List(items, folders).flatten.foldLeft(coll)(_ ++ fr"AND" ++ _) diff --git a/modules/fts-psql/src/main/scala/docspell/ftspsql/PsqlFtsClient.scala b/modules/fts-psql/src/main/scala/docspell/ftspsql/PsqlFtsClient.scala index f0dc64ea..fafc92ad 100644 --- a/modules/fts-psql/src/main/scala/docspell/ftspsql/PsqlFtsClient.scala +++ b/modules/fts-psql/src/main/scala/docspell/ftspsql/PsqlFtsClient.scala @@ -26,6 +26,8 @@ final class PsqlFtsClient[F[_]: Sync](cfg: PsqlConfig, xa: Transactor[F]) val engine = Ident.unsafe("postgres") val config = cfg + private[this] val logger = 
docspell.logging.getLogger[F] + private[ftspsql] val transactor = xa private[this] val searchSummary = @@ -83,6 +85,7 @@ final class PsqlFtsClient[F[_]: Sync](cfg: PsqlConfig, xa: Transactor[F]) summary <- searchSummary(q).transact(xa) results <- search(q, true).transact(xa) endNanos <- Sync[F].delay(System.nanoTime()) + _ <- logger.debug(s"PSQL fulltext search hits: ${results.size}") duration = Duration.nanos(endNanos - startNanos) res = SearchResult .toFtsResult(summary, results) diff --git a/modules/fts-psql/src/main/scala/docspell/ftspsql/SearchResult.scala b/modules/fts-psql/src/main/scala/docspell/ftspsql/SearchResult.scala index faf37fe7..04073102 100644 --- a/modules/fts-psql/src/main/scala/docspell/ftspsql/SearchResult.scala +++ b/modules/fts-psql/src/main/scala/docspell/ftspsql/SearchResult.scala @@ -13,7 +13,7 @@ import docspell.ftsclient.FtsResult.{ItemMatch, MatchData} final case class SearchResult( id: Ident, itemId: Ident, - collective: Ident, + collective: CollectiveId, language: Language, attachId: Option[Ident], folderId: Option[Ident], diff --git a/modules/fts-solr/src/main/scala/docspell/ftssolr/JsonCodec.scala b/modules/fts-solr/src/main/scala/docspell/ftssolr/JsonCodec.scala index e4684fe8..fae84a88 100644 --- a/modules/fts-solr/src/main/scala/docspell/ftssolr/JsonCodec.scala +++ b/modules/fts-solr/src/main/scala/docspell/ftssolr/JsonCodec.scala @@ -125,7 +125,7 @@ trait JsonCodec { for { itemId <- c.get[Ident](Field.itemId.name) id <- c.get[Ident](Field.id.name) - coll <- c.get[Ident](Field.collectiveId.name) + coll <- c.get[CollectiveId](Field.collectiveId.name) score <- c.get[Double]("score") md <- decodeMatchData(c) } yield FtsResult.ItemMatch(id, itemId, coll, score, md) diff --git a/modules/store/src/test/scala/docspell/store/fts/TempFtsOpsTest.scala b/modules/store/src/test/scala/docspell/store/fts/TempFtsOpsTest.scala index e4876c74..03b37fe8 100644 --- a/modules/store/src/test/scala/docspell/store/fts/TempFtsOpsTest.scala +++ 
b/modules/store/src/test/scala/docspell/store/fts/TempFtsOpsTest.scala @@ -130,14 +130,14 @@ class TempFtsOpsTest extends DatabaseTest { ItemMatch( id(s"m$n"), id(s"item-$n"), - DocspellSystem.user, + CollectiveId(1), math.random(), FtsResult.ItemData ), ItemMatch( id(s"m$n-1"), id(s"item-$n"), - DocspellSystem.user, + CollectiveId(1), math.random(), AttachmentData(id(s"item-$n-attach-1"), "attachment.pdf") ) From b51b782c7362b38b02efbe5e77e8229794067322 Mon Sep 17 00:00:00 2001 From: eikek Date: Sun, 7 Aug 2022 16:31:09 +0200 Subject: [PATCH 13/15] Apply formatting fixes --- .../scala/docspell/addons/out/NewFile.scala | 2 ++ .../docspell/backend/BackendCommands.scala | 3 ++- .../scala/docspell/backend/JobFactory.scala | 1 + .../backend/joex/AddonPostProcess.scala | 1 + .../docspell/backend/joex/AddonPrepare.scala | 2 ++ .../backend/joex/FindJobOwnerAccount.scala | 6 +++++ .../backend/ops/AddonRunConfigValidate.scala | 1 + .../docspell/backend/ops/AddonValidate.scala | 1 + .../docspell/backend/ops/OAttachment.scala | 1 + .../docspell/backend/ops/OCollective.scala | 2 ++ .../docspell/backend/ops/ODownloadAll.scala | 3 +++ .../docspell/backend/ops/OEquipment.scala | 1 + .../scala/docspell/backend/ops/OFolder.scala | 3 ++- .../scala/docspell/backend/ops/OItem.scala | 2 ++ .../docspell/backend/ops/OItemLink.scala | 1 + .../scala/docspell/backend/ops/OJob.scala | 1 + .../scala/docspell/backend/ops/OTotp.scala | 1 + .../scala/docspell/backend/ops/OUpload.scala | 1 + .../docspell/backend/signup/OSignup.scala | 2 ++ .../backend/task/DownloadZipArgs.scala | 1 + .../scala/docspell/common/TaskArguments.scala | 6 +++++ .../scala/docspell/ftspsql/FtsRecord.scala | 1 + .../scala/docspell/joex/JoexAppImpl.scala | 2 ++ .../docspell/joex/fts/MigrationTask.scala | 1 + .../docspell/joex/learn/ClassifierName.scala | 2 ++ .../multiupload/MultiUploadArchiveTask.scala | 1 + .../joex/pagecount/AllPageCountTask.scala | 1 + .../joex/pdfconv/ConvertAllPdfTask.scala | 1 + 
.../joex/preview/AllPreviewsTask.scala | 1 + .../joex/updatecheck/UpdateCheckTask.scala | 2 ++ .../docspell/restserver/RestAppImpl.scala | 2 ++ .../docspell/restserver/auth/CookieData.scala | 1 + .../conv/AddonValidationSupport.scala | 1 + .../restserver/routes/AddonRunRoutes.scala | 2 ++ .../restserver/routes/AttachmentRoutes.scala | 2 ++ .../routes/IntegrationEndpointRoutes.scala | 2 ++ .../restserver/routes/ItemMultiRoutes.scala | 2 ++ .../restserver/routes/ItemRoutes.scala | 2 ++ .../restserver/routes/JobQueueRoutes.scala | 2 ++ .../restserver/routes/UserRoutes.scala | 2 ++ .../restserver/ws/WebSocketRoutes.scala | 2 ++ .../docspell/scheduler/FindJobOwner.scala | 7 ++++++ .../main/scala/docspell/scheduler/Job.scala | 2 ++ .../scheduler/impl/JobStorePublish.scala | 1 + .../db/migration/common/JsonCodecs.scala | 6 +++++ .../common/MigrateCollectiveIdTaskArgs.scala | 22 +++++++++++++------ .../common/MigrateDueItemTasks.scala | 10 ++++++++- .../migration/common/MigrateNotifyTasks.scala | 14 +++++++++--- .../migration/common/TransactorSupport.scala | 8 +++++++ .../db/migration/data/AllPreviewsArgs.scala | 1 + .../db/migration/data/ConvertAllPdfArgs.scala | 1 + .../db/migration/data/DownloadZipArgs.scala | 3 ++- .../data/FileIntegrityCheckArgs.scala | 10 +++++---- .../db/migration/data/ItemAddonTaskArgs.scala | 1 + .../migration/data/PeriodicDueItemsArgs.scala | 2 ++ .../db/migration/data/PeriodicQueryArgs.scala | 2 ++ .../db/migration/data/ProcessItemArgs.scala | 4 ++-- .../db/migration/data/ReIndexTaskArgs.scala | 1 + .../data/ScheduledAddonTaskArgs.scala | 1 + .../h2/V1_29_2__MigrateNotifyTask.scala | 1 + .../h2/V1_32_2__MigrateChannels.scala | 1 + .../migration/h2/V1_39_2__MigrateTasks.scala | 1 + .../mariadb/V1_29_2__MigrateNotifyTask.scala | 1 + .../mariadb/V1_32_2__MigrateChannels.scala | 1 + .../mariadb/V1_39_2__MigrateTasks.scala | 1 + .../V1_29_2__MigrateNotifyTask.scala | 1 + .../postgresql/V1_32_2__MigrateChannels.scala | 1 + 
.../postgresql/V1_39_2__MigrateTasks.scala | 1 + .../store/records/RPeriodicTask.scala | 2 ++ .../docspell/store/fts/TempFtsOpsTest.scala | 3 +++ .../docspell/store/migrate/MigrateTest.scala | 2 ++ 71 files changed, 166 insertions(+), 20 deletions(-) diff --git a/modules/addonlib/src/main/scala/docspell/addons/out/NewFile.scala b/modules/addonlib/src/main/scala/docspell/addons/out/NewFile.scala index 86d4ed05..395cf4b1 100644 --- a/modules/addonlib/src/main/scala/docspell/addons/out/NewFile.scala +++ b/modules/addonlib/src/main/scala/docspell/addons/out/NewFile.scala @@ -9,10 +9,12 @@ package docspell.addons.out import cats.effect.Sync import cats.syntax.all._ import fs2.io.file.{Files, Path} + import docspell.addons.out.NewFile.Meta import docspell.common.ProcessItemArgs.ProcessMeta import docspell.common.{CollectiveId, Ident, Language} import docspell.logging.Logger + import io.circe.Codec import io.circe.generic.extras.Configuration import io.circe.generic.extras.semiauto.deriveConfiguredCodec diff --git a/modules/backend/src/main/scala/docspell/backend/BackendCommands.scala b/modules/backend/src/main/scala/docspell/backend/BackendCommands.scala index d663981c..52af364d 100644 --- a/modules/backend/src/main/scala/docspell/backend/BackendCommands.scala +++ b/modules/backend/src/main/scala/docspell/backend/BackendCommands.scala @@ -9,11 +9,12 @@ package docspell.backend import cats.data.{NonEmptyList => Nel} import cats.effect.Sync import cats.syntax.all._ + import docspell.backend.BackendCommands.EventContext import docspell.backend.ops.OCustomFields.SetValue import docspell.backend.ops._ -import docspell.common.bc._ import docspell.common._ +import docspell.common.bc._ private[backend] class BackendCommands[F[_]: Sync]( itemOps: OItem[F], diff --git a/modules/backend/src/main/scala/docspell/backend/JobFactory.scala b/modules/backend/src/main/scala/docspell/backend/JobFactory.scala index dd54dd2b..39ed8755 100644 --- 
a/modules/backend/src/main/scala/docspell/backend/JobFactory.scala +++ b/modules/backend/src/main/scala/docspell/backend/JobFactory.scala @@ -8,6 +8,7 @@ package docspell.backend import cats.effect._ import cats.implicits._ + import docspell.backend.MailAddressCodec import docspell.backend.task.DownloadZipArgs import docspell.common._ diff --git a/modules/backend/src/main/scala/docspell/backend/joex/AddonPostProcess.scala b/modules/backend/src/main/scala/docspell/backend/joex/AddonPostProcess.scala index 26b83f85..cf5452f7 100644 --- a/modules/backend/src/main/scala/docspell/backend/joex/AddonPostProcess.scala +++ b/modules/backend/src/main/scala/docspell/backend/joex/AddonPostProcess.scala @@ -10,6 +10,7 @@ import cats.data.OptionT import cats.effect.kernel.Sync import cats.syntax.all._ import fs2.io.file.{Files, Path} + import docspell.addons._ import docspell.addons.out.{AddonOutput, ItemFile, NewItem} import docspell.backend.JobFactory diff --git a/modules/backend/src/main/scala/docspell/backend/joex/AddonPrepare.scala b/modules/backend/src/main/scala/docspell/backend/joex/AddonPrepare.scala index 87097842..505b781d 100644 --- a/modules/backend/src/main/scala/docspell/backend/joex/AddonPrepare.scala +++ b/modules/backend/src/main/scala/docspell/backend/joex/AddonPrepare.scala @@ -9,6 +9,7 @@ package docspell.backend.joex import cats.data.{Kleisli, OptionT} import cats.effect._ import cats.syntax.all._ + import docspell.addons.Middleware import docspell.backend.auth.AuthToken import docspell.backend.joex.AddonOps.AddonRunConfigRef @@ -17,6 +18,7 @@ import docspell.logging.Logger import docspell.store.Store import docspell.store.queries.QLogin import docspell.store.records.RNode + import scodec.bits.ByteVector private[joex] class AddonPrepare[F[_]: Sync](store: Store[F]) extends LoggerExtension { diff --git a/modules/backend/src/main/scala/docspell/backend/joex/FindJobOwnerAccount.scala 
b/modules/backend/src/main/scala/docspell/backend/joex/FindJobOwnerAccount.scala index 500ac28b..445f0daa 100644 --- a/modules/backend/src/main/scala/docspell/backend/joex/FindJobOwnerAccount.scala +++ b/modules/backend/src/main/scala/docspell/backend/joex/FindJobOwnerAccount.scala @@ -1,3 +1,9 @@ +/* + * Copyright 2020 Eike K. & Contributors + * + * SPDX-License-Identifier: AGPL-3.0-or-later + */ + package docspell.backend.joex import docspell.common.AccountId diff --git a/modules/backend/src/main/scala/docspell/backend/ops/AddonRunConfigValidate.scala b/modules/backend/src/main/scala/docspell/backend/ops/AddonRunConfigValidate.scala index f9d96762..bed83ba7 100644 --- a/modules/backend/src/main/scala/docspell/backend/ops/AddonRunConfigValidate.scala +++ b/modules/backend/src/main/scala/docspell/backend/ops/AddonRunConfigValidate.scala @@ -9,6 +9,7 @@ package docspell.backend.ops import cats.data.NonEmptyList import cats.effect._ import cats.syntax.all._ + import docspell.backend.ops.AddonRunConfigError._ import docspell.backend.ops.OAddons.{AddonRunConfigResult, AddonRunInsert} import docspell.common.CollectiveId diff --git a/modules/backend/src/main/scala/docspell/backend/ops/AddonValidate.scala b/modules/backend/src/main/scala/docspell/backend/ops/AddonValidate.scala index 3e0a9632..73e783ac 100644 --- a/modules/backend/src/main/scala/docspell/backend/ops/AddonValidate.scala +++ b/modules/backend/src/main/scala/docspell/backend/ops/AddonValidate.scala @@ -11,6 +11,7 @@ import cats.effect._ import cats.syntax.all._ import fs2.Stream import fs2.io.file.Path + import docspell.addons.{AddonMeta, RunnerType} import docspell.backend.Config import docspell.backend.ops.AddonValidationError._ diff --git a/modules/backend/src/main/scala/docspell/backend/ops/OAttachment.scala b/modules/backend/src/main/scala/docspell/backend/ops/OAttachment.scala index 80b51ba0..1f44415d 100644 --- a/modules/backend/src/main/scala/docspell/backend/ops/OAttachment.scala +++ 
b/modules/backend/src/main/scala/docspell/backend/ops/OAttachment.scala @@ -10,6 +10,7 @@ import cats.data.{NonEmptyList => Nel, OptionT} import cats.effect._ import cats.syntax.all._ import fs2.Stream + import docspell.backend.JobFactory import docspell.common.MakePreviewArgs.StoreMode import docspell.common._ diff --git a/modules/backend/src/main/scala/docspell/backend/ops/OCollective.scala b/modules/backend/src/main/scala/docspell/backend/ops/OCollective.scala index 41b3bfdd..6db532d5 100644 --- a/modules/backend/src/main/scala/docspell/backend/ops/OCollective.scala +++ b/modules/backend/src/main/scala/docspell/backend/ops/OCollective.scala @@ -10,6 +10,7 @@ import cats.data.OptionT import cats.effect.{Async, Resource} import cats.implicits._ import fs2.Stream + import docspell.backend.JobFactory import docspell.backend.PasswordCrypt import docspell.backend.ops.OCollective._ @@ -20,6 +21,7 @@ import docspell.store.UpdateResult import docspell.store.queries.{QCollective, QUser} import docspell.store.records._ import docspell.store.{AddResult, Store} + import com.github.eikek.calev._ trait OCollective[F[_]] { diff --git a/modules/backend/src/main/scala/docspell/backend/ops/ODownloadAll.scala b/modules/backend/src/main/scala/docspell/backend/ops/ODownloadAll.scala index a99c170c..98d070db 100644 --- a/modules/backend/src/main/scala/docspell/backend/ops/ODownloadAll.scala +++ b/modules/backend/src/main/scala/docspell/backend/ops/ODownloadAll.scala @@ -7,10 +7,12 @@ package docspell.backend.ops import java.security.MessageDigest + import cats.data.OptionT import cats.effect._ import cats.syntax.all._ import fs2.{Pipe, Stream} + import docspell.backend.JobFactory import docspell.backend.ops.ODownloadAll.model._ import docspell.backend.ops.OJob.JobCancelResult @@ -24,6 +26,7 @@ import docspell.store.Store import docspell.store.file.FileMetadata import docspell.store.queries.{QItem, Query} import docspell.store.records.{RDownloadQuery, RFileMeta, RJob} + import 
io.circe.generic.semiauto.{deriveDecoder, deriveEncoder} import io.circe.{Decoder, Encoder} import scodec.bits.ByteVector diff --git a/modules/backend/src/main/scala/docspell/backend/ops/OEquipment.scala b/modules/backend/src/main/scala/docspell/backend/ops/OEquipment.scala index 8e44a351..1315cb70 100644 --- a/modules/backend/src/main/scala/docspell/backend/ops/OEquipment.scala +++ b/modules/backend/src/main/scala/docspell/backend/ops/OEquipment.scala @@ -9,6 +9,7 @@ package docspell.backend.ops import cats.data.NonEmptyList import cats.effect.{Async, Resource} import cats.implicits._ + import docspell.common._ import docspell.store.records.{REquipment, RItem} import docspell.store.{AddResult, Store} diff --git a/modules/backend/src/main/scala/docspell/backend/ops/OFolder.scala b/modules/backend/src/main/scala/docspell/backend/ops/OFolder.scala index 744d3d02..b582da6b 100644 --- a/modules/backend/src/main/scala/docspell/backend/ops/OFolder.scala +++ b/modules/backend/src/main/scala/docspell/backend/ops/OFolder.scala @@ -7,8 +7,9 @@ package docspell.backend.ops import cats.data.{NonEmptyList => Nel} -import cats.syntax.option._ import cats.effect._ +import cats.syntax.option._ + import docspell.common._ import docspell.store.queries.QFolder import docspell.store.records.{RFolder, RUser} diff --git a/modules/backend/src/main/scala/docspell/backend/ops/OItem.scala b/modules/backend/src/main/scala/docspell/backend/ops/OItem.scala index 7462bc18..9b796431 100644 --- a/modules/backend/src/main/scala/docspell/backend/ops/OItem.scala +++ b/modules/backend/src/main/scala/docspell/backend/ops/OItem.scala @@ -9,6 +9,7 @@ package docspell.backend.ops import cats.data.{NonEmptyList => Nel, OptionT} import cats.effect.{Async, Resource} import cats.implicits._ + import docspell.backend.AttachedEvent import docspell.backend.JobFactory import docspell.backend.fulltext.CreateIndex @@ -22,6 +23,7 @@ import docspell.scheduler.usertask.UserTaskScope import 
docspell.store.queries.{QAttachment, QItem, QMoveAttachment} import docspell.store.records._ import docspell.store.{AddResult, Store, UpdateResult} + import doobie.implicits._ trait OItem[F[_]] { diff --git a/modules/backend/src/main/scala/docspell/backend/ops/OItemLink.scala b/modules/backend/src/main/scala/docspell/backend/ops/OItemLink.scala index 79e68c42..bfa3b675 100644 --- a/modules/backend/src/main/scala/docspell/backend/ops/OItemLink.scala +++ b/modules/backend/src/main/scala/docspell/backend/ops/OItemLink.scala @@ -9,6 +9,7 @@ package docspell.backend.ops import cats.data.NonEmptyList import cats.effect._ import cats.implicits._ + import docspell.backend.ops.OItemLink.LinkResult import docspell.backend.ops.search.OSearch import docspell.common._ diff --git a/modules/backend/src/main/scala/docspell/backend/ops/OJob.scala b/modules/backend/src/main/scala/docspell/backend/ops/OJob.scala index 67923b7d..1ca39f9d 100644 --- a/modules/backend/src/main/scala/docspell/backend/ops/OJob.scala +++ b/modules/backend/src/main/scala/docspell/backend/ops/OJob.scala @@ -9,6 +9,7 @@ package docspell.backend.ops import cats.data.OptionT import cats.effect._ import cats.implicits._ + import docspell.backend.ops.OJob.{CollectiveQueueState, JobCancelResult} import docspell.common._ import docspell.pubsub.api.PubSubT diff --git a/modules/backend/src/main/scala/docspell/backend/ops/OTotp.scala b/modules/backend/src/main/scala/docspell/backend/ops/OTotp.scala index ea11ca6e..4f18a52a 100644 --- a/modules/backend/src/main/scala/docspell/backend/ops/OTotp.scala +++ b/modules/backend/src/main/scala/docspell/backend/ops/OTotp.scala @@ -9,6 +9,7 @@ package docspell.backend.ops import cats.data.OptionT import cats.effect._ import cats.implicits._ + import docspell.backend.ops.OTotp.{ConfirmResult, InitResult, OtpState} import docspell.common._ import docspell.store.records.RTotp diff --git a/modules/backend/src/main/scala/docspell/backend/ops/OUpload.scala 
b/modules/backend/src/main/scala/docspell/backend/ops/OUpload.scala index 246cdd2d..d4f9377c 100644 --- a/modules/backend/src/main/scala/docspell/backend/ops/OUpload.scala +++ b/modules/backend/src/main/scala/docspell/backend/ops/OUpload.scala @@ -11,6 +11,7 @@ import cats.data.{EitherT, OptionT} import cats.effect._ import cats.implicits._ import fs2.Stream + import docspell.backend.JobFactory import docspell.common._ import docspell.scheduler.usertask.UserTaskScope diff --git a/modules/backend/src/main/scala/docspell/backend/signup/OSignup.scala b/modules/backend/src/main/scala/docspell/backend/signup/OSignup.scala index 67904766..ba6bd124 100644 --- a/modules/backend/src/main/scala/docspell/backend/signup/OSignup.scala +++ b/modules/backend/src/main/scala/docspell/backend/signup/OSignup.scala @@ -9,10 +9,12 @@ package docspell.backend.signup import cats.data.OptionT import cats.effect.{Async, Resource} import cats.implicits._ + import docspell.backend.PasswordCrypt import docspell.common._ import docspell.store.records.{RCollective, RInvitation, RUser} import docspell.store.{AddResult, Store} + import doobie.free.connection.ConnectionIO trait OSignup[F[_]] { diff --git a/modules/backend/src/main/scala/docspell/backend/task/DownloadZipArgs.scala b/modules/backend/src/main/scala/docspell/backend/task/DownloadZipArgs.scala index 29e33863..ce11314b 100644 --- a/modules/backend/src/main/scala/docspell/backend/task/DownloadZipArgs.scala +++ b/modules/backend/src/main/scala/docspell/backend/task/DownloadZipArgs.scala @@ -8,6 +8,7 @@ package docspell.backend.task import docspell.backend.ops.ODownloadAll.model.DownloadRequest import docspell.common._ + import io.circe.generic.semiauto.{deriveDecoder, deriveEncoder} import io.circe.{Decoder, Encoder} diff --git a/modules/common/src/main/scala/docspell/common/TaskArguments.scala b/modules/common/src/main/scala/docspell/common/TaskArguments.scala index 86117b0c..b397aa09 100644 --- 
a/modules/common/src/main/scala/docspell/common/TaskArguments.scala +++ b/modules/common/src/main/scala/docspell/common/TaskArguments.scala @@ -1,3 +1,9 @@ +/* + * Copyright 2020 Eike K. & Contributors + * + * SPDX-License-Identifier: AGPL-3.0-or-later + */ + package docspell.common /** A marker trait for task arguments. diff --git a/modules/fts-psql/src/main/scala/docspell/ftspsql/FtsRecord.scala b/modules/fts-psql/src/main/scala/docspell/ftspsql/FtsRecord.scala index d760151c..adf2394b 100644 --- a/modules/fts-psql/src/main/scala/docspell/ftspsql/FtsRecord.scala +++ b/modules/fts-psql/src/main/scala/docspell/ftspsql/FtsRecord.scala @@ -7,6 +7,7 @@ package docspell.ftspsql import cats.syntax.all._ + import docspell.common.{CollectiveId, Ident, Language} import docspell.ftsclient.TextData diff --git a/modules/joex/src/main/scala/docspell/joex/JoexAppImpl.scala b/modules/joex/src/main/scala/docspell/joex/JoexAppImpl.scala index 830a6aba..bf9e0137 100644 --- a/modules/joex/src/main/scala/docspell/joex/JoexAppImpl.scala +++ b/modules/joex/src/main/scala/docspell/joex/JoexAppImpl.scala @@ -9,6 +9,7 @@ package docspell.joex import cats.effect._ import cats.implicits._ import fs2.concurrent.SignallingRef + import docspell.backend.MailAddressCodec import docspell.backend.joex.FindJobOwnerAccount import docspell.backend.ops._ @@ -27,6 +28,7 @@ import docspell.scheduler.impl.{JobStoreModuleBuilder, SchedulerModuleBuilder} import docspell.scheduler.usertask.{UserTaskScope, UserTaskStore} import docspell.store.Store import docspell.store.records.{REmptyTrashSetting, RJobLog} + import emil.javamail._ import org.http4s.client.Client diff --git a/modules/joex/src/main/scala/docspell/joex/fts/MigrationTask.scala b/modules/joex/src/main/scala/docspell/joex/fts/MigrationTask.scala index 62cdc0e3..9d61dc0b 100644 --- a/modules/joex/src/main/scala/docspell/joex/fts/MigrationTask.scala +++ b/modules/joex/src/main/scala/docspell/joex/fts/MigrationTask.scala @@ -8,6 +8,7 @@ package 
docspell.joex.fts import cats.effect._ import cats.implicits._ + import docspell.backend.fulltext.CreateIndex import docspell.common._ import docspell.ftsclient._ diff --git a/modules/joex/src/main/scala/docspell/joex/learn/ClassifierName.scala b/modules/joex/src/main/scala/docspell/joex/learn/ClassifierName.scala index 3394d467..1b1b2e83 100644 --- a/modules/joex/src/main/scala/docspell/joex/learn/ClassifierName.scala +++ b/modules/joex/src/main/scala/docspell/joex/learn/ClassifierName.scala @@ -8,8 +8,10 @@ package docspell.joex.learn import cats.data.NonEmptyList import cats.implicits._ + import docspell.common.CollectiveId import docspell.store.records.{RClassifierModel, RClassifierSetting} + import doobie._ final class ClassifierName(val name: String) extends AnyVal diff --git a/modules/joex/src/main/scala/docspell/joex/multiupload/MultiUploadArchiveTask.scala b/modules/joex/src/main/scala/docspell/joex/multiupload/MultiUploadArchiveTask.scala index 56438c44..e1ff77a1 100644 --- a/modules/joex/src/main/scala/docspell/joex/multiupload/MultiUploadArchiveTask.scala +++ b/modules/joex/src/main/scala/docspell/joex/multiupload/MultiUploadArchiveTask.scala @@ -11,6 +11,7 @@ import cats.data.OptionT import cats.effect._ import cats.implicits._ import fs2.Stream + import docspell.backend.JobFactory import docspell.common._ import docspell.common.util.Zip diff --git a/modules/joex/src/main/scala/docspell/joex/pagecount/AllPageCountTask.scala b/modules/joex/src/main/scala/docspell/joex/pagecount/AllPageCountTask.scala index cbb3d245..cac25c4c 100644 --- a/modules/joex/src/main/scala/docspell/joex/pagecount/AllPageCountTask.scala +++ b/modules/joex/src/main/scala/docspell/joex/pagecount/AllPageCountTask.scala @@ -9,6 +9,7 @@ package docspell.joex.pagecount import cats.effect._ import cats.implicits._ import fs2.{Chunk, Stream} + import docspell.backend.JobFactory import docspell.common._ import docspell.scheduler._ diff --git 
a/modules/joex/src/main/scala/docspell/joex/pdfconv/ConvertAllPdfTask.scala b/modules/joex/src/main/scala/docspell/joex/pdfconv/ConvertAllPdfTask.scala index c1a8eeab..d34d0d92 100644 --- a/modules/joex/src/main/scala/docspell/joex/pdfconv/ConvertAllPdfTask.scala +++ b/modules/joex/src/main/scala/docspell/joex/pdfconv/ConvertAllPdfTask.scala @@ -9,6 +9,7 @@ package docspell.joex.pdfconv import cats.effect._ import cats.implicits._ import fs2.{Chunk, Stream} + import docspell.common._ import docspell.scheduler._ import docspell.scheduler.usertask.UserTaskScope diff --git a/modules/joex/src/main/scala/docspell/joex/preview/AllPreviewsTask.scala b/modules/joex/src/main/scala/docspell/joex/preview/AllPreviewsTask.scala index 75c75d23..2bc6d22f 100644 --- a/modules/joex/src/main/scala/docspell/joex/preview/AllPreviewsTask.scala +++ b/modules/joex/src/main/scala/docspell/joex/preview/AllPreviewsTask.scala @@ -9,6 +9,7 @@ package docspell.joex.preview import cats.effect._ import cats.implicits._ import fs2.{Chunk, Stream} + import docspell.backend.JobFactory import docspell.common.MakePreviewArgs.StoreMode import docspell.common._ diff --git a/modules/joex/src/main/scala/docspell/joex/updatecheck/UpdateCheckTask.scala b/modules/joex/src/main/scala/docspell/joex/updatecheck/UpdateCheckTask.scala index ce9f436e..57ccecae 100644 --- a/modules/joex/src/main/scala/docspell/joex/updatecheck/UpdateCheckTask.scala +++ b/modules/joex/src/main/scala/docspell/joex/updatecheck/UpdateCheckTask.scala @@ -9,11 +9,13 @@ package docspell.joex.updatecheck import cats.data.OptionT import cats.effect._ import cats.implicits._ + import docspell.common._ import docspell.scheduler.Task import docspell.scheduler.usertask.UserTask import docspell.store.Store import docspell.store.records.{RUser, RUserEmail} + import emil._ object UpdateCheckTask { diff --git a/modules/restserver/src/main/scala/docspell/restserver/RestAppImpl.scala 
b/modules/restserver/src/main/scala/docspell/restserver/RestAppImpl.scala index 1e1e9e8e..500a51d5 100644 --- a/modules/restserver/src/main/scala/docspell/restserver/RestAppImpl.scala +++ b/modules/restserver/src/main/scala/docspell/restserver/RestAppImpl.scala @@ -9,6 +9,7 @@ package docspell.restserver import cats.effect._ import fs2.Stream import fs2.concurrent.Topic + import docspell.backend.BackendApp import docspell.backend.auth.{AuthToken, ShareToken} import docspell.backend.joex.FindJobOwnerAccount @@ -28,6 +29,7 @@ import docspell.restserver.webapp.{TemplateRoutes, Templates, WebjarRoutes} import docspell.restserver.ws.{OutputEvent, WebSocketRoutes} import docspell.scheduler.impl.JobStoreModuleBuilder import docspell.store.Store + import emil.javamail.JavaMailEmil import org.http4s.HttpRoutes import org.http4s.client.Client diff --git a/modules/restserver/src/main/scala/docspell/restserver/auth/CookieData.scala b/modules/restserver/src/main/scala/docspell/restserver/auth/CookieData.scala index 577276f1..f0a4f345 100644 --- a/modules/restserver/src/main/scala/docspell/restserver/auth/CookieData.scala +++ b/modules/restserver/src/main/scala/docspell/restserver/auth/CookieData.scala @@ -8,6 +8,7 @@ package docspell.restserver.auth import docspell.backend.auth._ import docspell.common.{AccountInfo, LenientUri} + import org.http4s._ import org.typelevel.ci.CIString diff --git a/modules/restserver/src/main/scala/docspell/restserver/conv/AddonValidationSupport.scala b/modules/restserver/src/main/scala/docspell/restserver/conv/AddonValidationSupport.scala index 2ae9872f..cd20e6fa 100644 --- a/modules/restserver/src/main/scala/docspell/restserver/conv/AddonValidationSupport.scala +++ b/modules/restserver/src/main/scala/docspell/restserver/conv/AddonValidationSupport.scala @@ -7,6 +7,7 @@ package docspell.restserver.conv import cats.syntax.all._ + import docspell.addons.AddonMeta import docspell.backend.ops.AddonValidationError import 
docspell.backend.ops.OAddons.AddonValidationResult diff --git a/modules/restserver/src/main/scala/docspell/restserver/routes/AddonRunRoutes.scala b/modules/restserver/src/main/scala/docspell/restserver/routes/AddonRunRoutes.scala index 25eb46fc..7b7a977d 100644 --- a/modules/restserver/src/main/scala/docspell/restserver/routes/AddonRunRoutes.scala +++ b/modules/restserver/src/main/scala/docspell/restserver/routes/AddonRunRoutes.scala @@ -9,11 +9,13 @@ package docspell.restserver.routes import cats.data.NonEmptyList import cats.effect._ import cats.syntax.all._ + import docspell.backend.BackendApp import docspell.backend.auth.AuthToken import docspell.restapi.model._ import docspell.restserver.http4s.ThrowableResponseMapper import docspell.scheduler.usertask.UserTaskScope + import org.http4s.HttpRoutes import org.http4s.circe.CirceEntityCodec._ import org.http4s.dsl.Http4sDsl diff --git a/modules/restserver/src/main/scala/docspell/restserver/routes/AttachmentRoutes.scala b/modules/restserver/src/main/scala/docspell/restserver/routes/AttachmentRoutes.scala index 39574043..d1e4ac60 100644 --- a/modules/restserver/src/main/scala/docspell/restserver/routes/AttachmentRoutes.scala +++ b/modules/restserver/src/main/scala/docspell/restserver/routes/AttachmentRoutes.scala @@ -8,6 +8,7 @@ package docspell.restserver.routes import cats.effect._ import cats.implicits._ + import docspell.backend.BackendApp import docspell.backend.auth.AuthToken import docspell.backend.ops._ @@ -18,6 +19,7 @@ import docspell.restserver.conv.Conversions import docspell.restserver.http4s.BinaryUtil import docspell.restserver.webapp.Webjars import docspell.scheduler.usertask.UserTaskScope + import org.http4s._ import org.http4s.circe.CirceEntityDecoder._ import org.http4s.circe.CirceEntityEncoder._ diff --git a/modules/restserver/src/main/scala/docspell/restserver/routes/IntegrationEndpointRoutes.scala b/modules/restserver/src/main/scala/docspell/restserver/routes/IntegrationEndpointRoutes.scala 
index f95588b3..dca7a111 100644 --- a/modules/restserver/src/main/scala/docspell/restserver/routes/IntegrationEndpointRoutes.scala +++ b/modules/restserver/src/main/scala/docspell/restserver/routes/IntegrationEndpointRoutes.scala @@ -9,12 +9,14 @@ package docspell.restserver.routes import cats.data.{EitherT, OptionT} import cats.effect._ import cats.implicits._ + import docspell.backend.BackendApp import docspell.common._ import docspell.restserver.Config import docspell.restserver.conv.Conversions._ import docspell.restserver.http4s.Responses import docspell.store.records.{RCollective, RItem} + import org.http4s._ import org.http4s.circe.CirceEntityEncoder._ import org.http4s.dsl.Http4sDsl diff --git a/modules/restserver/src/main/scala/docspell/restserver/routes/ItemMultiRoutes.scala b/modules/restserver/src/main/scala/docspell/restserver/routes/ItemMultiRoutes.scala index cc65ac0c..cfa40f0f 100644 --- a/modules/restserver/src/main/scala/docspell/restserver/routes/ItemMultiRoutes.scala +++ b/modules/restserver/src/main/scala/docspell/restserver/routes/ItemMultiRoutes.scala @@ -8,6 +8,7 @@ package docspell.restserver.routes import cats.effect._ import cats.implicits._ + import docspell.backend.BackendApp import docspell.backend.auth.AuthToken import docspell.backend.ops.OCustomFields.{RemoveValue, SetValue} @@ -17,6 +18,7 @@ import docspell.restserver.Config import docspell.restserver.conv.{Conversions, MultiIdSupport, NonEmptyListSupport} import docspell.restserver.http4s.ClientRequestInfo import docspell.scheduler.usertask.UserTaskScope + import org.http4s.HttpRoutes import org.http4s.circe.CirceEntityDecoder._ import org.http4s.circe.CirceEntityEncoder._ diff --git a/modules/restserver/src/main/scala/docspell/restserver/routes/ItemRoutes.scala b/modules/restserver/src/main/scala/docspell/restserver/routes/ItemRoutes.scala index 57db1d9e..464bacff 100644 --- a/modules/restserver/src/main/scala/docspell/restserver/routes/ItemRoutes.scala +++ 
b/modules/restserver/src/main/scala/docspell/restserver/routes/ItemRoutes.scala @@ -9,6 +9,7 @@ package docspell.restserver.routes import cats.data.NonEmptyList import cats.effect._ import cats.implicits._ + import docspell.backend.BackendApp import docspell.backend.auth.AuthToken import docspell.backend.ops.OCustomFields.{RemoveValue, SetValue} @@ -21,6 +22,7 @@ import docspell.restserver.http4s.ClientRequestInfo import docspell.restserver.http4s.Responses import docspell.restserver.http4s.{QueryParam => QP} import docspell.scheduler.usertask.UserTaskScope + import org.http4s.HttpRoutes import org.http4s.circe.CirceEntityDecoder._ import org.http4s.circe.CirceEntityEncoder._ diff --git a/modules/restserver/src/main/scala/docspell/restserver/routes/JobQueueRoutes.scala b/modules/restserver/src/main/scala/docspell/restserver/routes/JobQueueRoutes.scala index 25841f25..ba596a23 100644 --- a/modules/restserver/src/main/scala/docspell/restserver/routes/JobQueueRoutes.scala +++ b/modules/restserver/src/main/scala/docspell/restserver/routes/JobQueueRoutes.scala @@ -8,12 +8,14 @@ package docspell.restserver.routes import cats.effect._ import cats.implicits._ + import docspell.backend.BackendApp import docspell.backend.auth.AuthToken import docspell.common.Ident import docspell.restapi.model.JobPriority import docspell.restserver.conv.Conversions import docspell.scheduler.usertask.UserTaskScope + import org.http4s.HttpRoutes import org.http4s.circe.CirceEntityDecoder._ import org.http4s.circe.CirceEntityEncoder._ diff --git a/modules/restserver/src/main/scala/docspell/restserver/routes/UserRoutes.scala b/modules/restserver/src/main/scala/docspell/restserver/routes/UserRoutes.scala index 784a423a..82f040ea 100644 --- a/modules/restserver/src/main/scala/docspell/restserver/routes/UserRoutes.scala +++ b/modules/restserver/src/main/scala/docspell/restserver/routes/UserRoutes.scala @@ -8,6 +8,7 @@ package docspell.restserver.routes import cats.effect._ import cats.implicits._ + 
import docspell.backend.BackendApp import docspell.backend.auth.AuthToken import docspell.backend.ops.OCollective @@ -15,6 +16,7 @@ import docspell.common._ import docspell.restapi.model._ import docspell.restserver.conv.Conversions._ import docspell.store.UpdateResult + import org.http4s.HttpRoutes import org.http4s.circe.CirceEntityDecoder._ import org.http4s.circe.CirceEntityEncoder._ diff --git a/modules/restserver/src/main/scala/docspell/restserver/ws/WebSocketRoutes.scala b/modules/restserver/src/main/scala/docspell/restserver/ws/WebSocketRoutes.scala index 44bb2229..ea3b3120 100644 --- a/modules/restserver/src/main/scala/docspell/restserver/ws/WebSocketRoutes.scala +++ b/modules/restserver/src/main/scala/docspell/restserver/ws/WebSocketRoutes.scala @@ -10,9 +10,11 @@ import cats.effect.Async import cats.implicits._ import fs2.concurrent.Topic import fs2.{Pipe, Stream} + import docspell.backend.BackendApp import docspell.backend.auth.AuthToken import docspell.scheduler.usertask.UserTaskScope + import org.http4s.HttpRoutes import org.http4s.dsl.Http4sDsl import org.http4s.server.websocket.WebSocketBuilder2 diff --git a/modules/scheduler/api/src/main/scala/docspell/scheduler/FindJobOwner.scala b/modules/scheduler/api/src/main/scala/docspell/scheduler/FindJobOwner.scala index ad2afaf7..6e8ce0f1 100644 --- a/modules/scheduler/api/src/main/scala/docspell/scheduler/FindJobOwner.scala +++ b/modules/scheduler/api/src/main/scala/docspell/scheduler/FindJobOwner.scala @@ -1,7 +1,14 @@ +/* + * Copyright 2020 Eike K. & Contributors + * + * SPDX-License-Identifier: AGPL-3.0-or-later + */ + package docspell.scheduler import cats.Applicative import cats.data.{Kleisli, OptionT} + import docspell.common.AccountInfo /** Strategy to find the user that submitted the job. 
This is used to emit events about diff --git a/modules/scheduler/api/src/main/scala/docspell/scheduler/Job.scala b/modules/scheduler/api/src/main/scala/docspell/scheduler/Job.scala index 1050645f..bb17d218 100644 --- a/modules/scheduler/api/src/main/scala/docspell/scheduler/Job.scala +++ b/modules/scheduler/api/src/main/scala/docspell/scheduler/Job.scala @@ -8,8 +8,10 @@ package docspell.scheduler import cats.effect.Sync import cats.syntax.functor._ + import docspell.common._ import docspell.scheduler.usertask.UserTaskScope + import io.circe.Encoder final case class Job[A]( diff --git a/modules/scheduler/impl/src/main/scala/docspell/scheduler/impl/JobStorePublish.scala b/modules/scheduler/impl/src/main/scala/docspell/scheduler/impl/JobStorePublish.scala index 4bb82b99..0285d6e4 100644 --- a/modules/scheduler/impl/src/main/scala/docspell/scheduler/impl/JobStorePublish.scala +++ b/modules/scheduler/impl/src/main/scala/docspell/scheduler/impl/JobStorePublish.scala @@ -9,6 +9,7 @@ package docspell.scheduler.impl import cats.data.OptionT import cats.effect._ import cats.implicits._ + import docspell.common.{Ident, JobState} import docspell.notification.api.{Event, EventSink} import docspell.pubsub.api.PubSubT diff --git a/modules/store/src/main/scala/db/migration/common/JsonCodecs.scala b/modules/store/src/main/scala/db/migration/common/JsonCodecs.scala index 7b1dc61f..e7d18612 100644 --- a/modules/store/src/main/scala/db/migration/common/JsonCodecs.scala +++ b/modules/store/src/main/scala/db/migration/common/JsonCodecs.scala @@ -1,3 +1,9 @@ +/* + * Copyright 2020 Eike K. 
& Contributors + * + * SPDX-License-Identifier: AGPL-3.0-or-later + */ + package db.migration.common import emil.MailAddress diff --git a/modules/store/src/main/scala/db/migration/common/MigrateCollectiveIdTaskArgs.scala b/modules/store/src/main/scala/db/migration/common/MigrateCollectiveIdTaskArgs.scala index d3522c7e..8871111d 100644 --- a/modules/store/src/main/scala/db/migration/common/MigrateCollectiveIdTaskArgs.scala +++ b/modules/store/src/main/scala/db/migration/common/MigrateCollectiveIdTaskArgs.scala @@ -1,11 +1,20 @@ +/* + * Copyright 2020 Eike K. & Contributors + * + * SPDX-License-Identifier: AGPL-3.0-or-later + */ + package db.migration.common -import cats.syntax.all._ import cats.effect._ +import cats.syntax.all._ + import docspell.common._ -import docspell.store.records.{RCollective, RJob, RPeriodicTask, RUser} -import doobie._ -import doobie.implicits._ +import docspell.notification.api.{PeriodicDueItemsArgs, PeriodicQueryArgs} +import docspell.store.qb.DSL._ +import docspell.store.qb._ +import docspell.store.records._ + import db.migration.data.{ AllPreviewsArgs => AllPreviewArgsLegacy, ConvertAllPdfArgs => ConvertAllPdfArgsLegacy, @@ -21,9 +30,8 @@ import db.migration.data.{ ScanMailboxArgs => ScanMailboxArgsLegacy, ScheduledAddonTaskArgs => ScheduledAddonTaskArgsLegacy } -import docspell.notification.api.{PeriodicDueItemsArgs, PeriodicQueryArgs} -import docspell.store.qb._ -import docspell.store.qb.DSL._ +import doobie._ +import doobie.implicits._ import io.circe.{Decoder, Encoder, parser} import org.flywaydb.core.api.migration.Context diff --git a/modules/store/src/main/scala/db/migration/common/MigrateDueItemTasks.scala b/modules/store/src/main/scala/db/migration/common/MigrateDueItemTasks.scala index 3a3dfbc4..c169c42f 100644 --- a/modules/store/src/main/scala/db/migration/common/MigrateDueItemTasks.scala +++ b/modules/store/src/main/scala/db/migration/common/MigrateDueItemTasks.scala @@ -1,13 +1,21 @@ +/* + * Copyright 2020 Eike K. 
& Contributors + * + * SPDX-License-Identifier: AGPL-3.0-or-later + */ + package db.migration.common import cats.data.{NonEmptyList, OptionT} import cats.effect.{IO, Sync} import cats.implicits._ + import docspell.common._ import docspell.common.syntax.StringSyntax._ import docspell.notification.api._ -import docspell.store.records._ import docspell.store.qb.DSL._ +import docspell.store.records._ + import db.migration.data.{ PeriodicDueItemsArgs => PeriodicDueItemsArgsLegacy, PeriodicQueryArgs => PeriodicQueryArgsLegacy, diff --git a/modules/store/src/main/scala/db/migration/common/MigrateNotifyTasks.scala b/modules/store/src/main/scala/db/migration/common/MigrateNotifyTasks.scala index 1f9a4d4d..df3d85bd 100644 --- a/modules/store/src/main/scala/db/migration/common/MigrateNotifyTasks.scala +++ b/modules/store/src/main/scala/db/migration/common/MigrateNotifyTasks.scala @@ -1,15 +1,23 @@ +/* + * Copyright 2020 Eike K. & Contributors + * + * SPDX-License-Identifier: AGPL-3.0-or-later + */ + package db.migration.common import cats.data.{NonEmptyList, OptionT} import cats.effect.{IO, Sync} import cats.implicits._ + import docspell.common._ import docspell.common.syntax.StringSyntax._ import docspell.notification.api._ -import docspell.store.records.{RNotificationChannelMail, RPeriodicTask} -import docspell.store.qb.DSL._ -import db.migration.data.{PeriodicDueItemsArgs => PeriodicDueItemsArgsLegacy, _} import docspell.store.qb.DML +import docspell.store.qb.DSL._ +import docspell.store.records.{RNotificationChannelMail, RPeriodicTask} + +import db.migration.data.{PeriodicDueItemsArgs => PeriodicDueItemsArgsLegacy, _} import doobie._ import doobie.implicits._ import emil.MailAddress diff --git a/modules/store/src/main/scala/db/migration/common/TransactorSupport.scala b/modules/store/src/main/scala/db/migration/common/TransactorSupport.scala index dd952ebb..2fd50cc3 100644 --- a/modules/store/src/main/scala/db/migration/common/TransactorSupport.scala +++ 
b/modules/store/src/main/scala/db/migration/common/TransactorSupport.scala @@ -1,7 +1,15 @@ +/* + * Copyright 2020 Eike K. & Contributors + * + * SPDX-License-Identifier: AGPL-3.0-or-later + */ + package db.migration.common import cats.effect.IO + import docspell.logging.Logger + import doobie.util.transactor.{Strategy, Transactor} import org.flywaydb.core.api.migration.Context diff --git a/modules/store/src/main/scala/db/migration/data/AllPreviewsArgs.scala b/modules/store/src/main/scala/db/migration/data/AllPreviewsArgs.scala index fcbfafc4..b1abe2a5 100644 --- a/modules/store/src/main/scala/db/migration/data/AllPreviewsArgs.scala +++ b/modules/store/src/main/scala/db/migration/data/AllPreviewsArgs.scala @@ -7,6 +7,7 @@ package db.migration.data import docspell.common._ + import io.circe.generic.semiauto._ import io.circe.{Decoder, Encoder} diff --git a/modules/store/src/main/scala/db/migration/data/ConvertAllPdfArgs.scala b/modules/store/src/main/scala/db/migration/data/ConvertAllPdfArgs.scala index 129da96b..dcceafbb 100644 --- a/modules/store/src/main/scala/db/migration/data/ConvertAllPdfArgs.scala +++ b/modules/store/src/main/scala/db/migration/data/ConvertAllPdfArgs.scala @@ -7,6 +7,7 @@ package db.migration.data import docspell.common._ + import io.circe._ import io.circe.generic.semiauto._ diff --git a/modules/store/src/main/scala/db/migration/data/DownloadZipArgs.scala b/modules/store/src/main/scala/db/migration/data/DownloadZipArgs.scala index 974f76ea..9c17ae44 100644 --- a/modules/store/src/main/scala/db/migration/data/DownloadZipArgs.scala +++ b/modules/store/src/main/scala/db/migration/data/DownloadZipArgs.scala @@ -6,11 +6,12 @@ package db.migration.data -import db.migration.data.DownloadZipArgs.DownloadRequest import docspell.common._ import docspell.query.ItemQuery.Expr.ValidItemStates import docspell.query.{ItemQuery, ItemQueryParser} import docspell.store.queries.Query + +import db.migration.data.DownloadZipArgs.DownloadRequest import 
io.circe.generic.semiauto.{deriveDecoder, deriveEncoder} import io.circe.{Decoder, Encoder} diff --git a/modules/store/src/main/scala/db/migration/data/FileIntegrityCheckArgs.scala b/modules/store/src/main/scala/db/migration/data/FileIntegrityCheckArgs.scala index ec97977c..757c56fb 100644 --- a/modules/store/src/main/scala/db/migration/data/FileIntegrityCheckArgs.scala +++ b/modules/store/src/main/scala/db/migration/data/FileIntegrityCheckArgs.scala @@ -7,12 +7,14 @@ package db.migration.data import cats.implicits._ -import db.migration.data.FileIntegrityCheckArgs.FileKeyPart + import docspell.common.{FileCategory, Ident} -import io.circe.generic.semiauto.{deriveDecoder, deriveEncoder} -import io.circe.{Decoder, Encoder} -import io.circe.syntax._ + +import db.migration.data.FileIntegrityCheckArgs.FileKeyPart import io.circe.DecodingFailure +import io.circe.generic.semiauto.{deriveDecoder, deriveEncoder} +import io.circe.syntax._ +import io.circe.{Decoder, Encoder} /** @deprecated `FileKey` and `FileKeyPart` was replaced to use a `CollectiveId` */ final case class FileIntegrityCheckArgs(pattern: FileKeyPart) diff --git a/modules/store/src/main/scala/db/migration/data/ItemAddonTaskArgs.scala b/modules/store/src/main/scala/db/migration/data/ItemAddonTaskArgs.scala index 5049fdd3..61be8449 100644 --- a/modules/store/src/main/scala/db/migration/data/ItemAddonTaskArgs.scala +++ b/modules/store/src/main/scala/db/migration/data/ItemAddonTaskArgs.scala @@ -7,6 +7,7 @@ package db.migration.data import docspell.common._ + import io.circe.generic.semiauto.{deriveDecoder, deriveEncoder} import io.circe.{Decoder, Encoder} diff --git a/modules/store/src/main/scala/db/migration/data/PeriodicDueItemsArgs.scala b/modules/store/src/main/scala/db/migration/data/PeriodicDueItemsArgs.scala index 477924d6..93587b41 100644 --- a/modules/store/src/main/scala/db/migration/data/PeriodicDueItemsArgs.scala +++ b/modules/store/src/main/scala/db/migration/data/PeriodicDueItemsArgs.scala @@ -7,8 
+7,10 @@ package db.migration.data import cats.data.NonEmptyList + import docspell.common._ import docspell.notification.api.ChannelRef + import io.circe.generic.semiauto import io.circe.{Decoder, Encoder} diff --git a/modules/store/src/main/scala/db/migration/data/PeriodicQueryArgs.scala b/modules/store/src/main/scala/db/migration/data/PeriodicQueryArgs.scala index 5ff6d94e..496b3ecb 100644 --- a/modules/store/src/main/scala/db/migration/data/PeriodicQueryArgs.scala +++ b/modules/store/src/main/scala/db/migration/data/PeriodicQueryArgs.scala @@ -7,8 +7,10 @@ package db.migration.data import cats.data.NonEmptyList + import docspell.common._ import docspell.notification.api.ChannelRef + import io.circe.generic.semiauto import io.circe.{Decoder, Encoder} diff --git a/modules/store/src/main/scala/db/migration/data/ProcessItemArgs.scala b/modules/store/src/main/scala/db/migration/data/ProcessItemArgs.scala index f951fed7..991adaa8 100644 --- a/modules/store/src/main/scala/db/migration/data/ProcessItemArgs.scala +++ b/modules/store/src/main/scala/db/migration/data/ProcessItemArgs.scala @@ -6,10 +6,10 @@ package db.migration.data -import ProcessItemArgs._ -import docspell.common.{FileIntegrityCheckArgs => _, _} import docspell.common.syntax.all._ +import docspell.common.{FileIntegrityCheckArgs => _, _} +import db.migration.data.ProcessItemArgs._ import io.circe._ import io.circe.generic.semiauto._ diff --git a/modules/store/src/main/scala/db/migration/data/ReIndexTaskArgs.scala b/modules/store/src/main/scala/db/migration/data/ReIndexTaskArgs.scala index a175e052..d2b02a7d 100644 --- a/modules/store/src/main/scala/db/migration/data/ReIndexTaskArgs.scala +++ b/modules/store/src/main/scala/db/migration/data/ReIndexTaskArgs.scala @@ -7,6 +7,7 @@ package db.migration.data import docspell.common._ + import io.circe._ import io.circe.generic.semiauto._ diff --git a/modules/store/src/main/scala/db/migration/data/ScheduledAddonTaskArgs.scala 
b/modules/store/src/main/scala/db/migration/data/ScheduledAddonTaskArgs.scala index 04064ab6..c32253b4 100644 --- a/modules/store/src/main/scala/db/migration/data/ScheduledAddonTaskArgs.scala +++ b/modules/store/src/main/scala/db/migration/data/ScheduledAddonTaskArgs.scala @@ -7,6 +7,7 @@ package db.migration.data import docspell.common._ + import io.circe.generic.semiauto.{deriveDecoder, deriveEncoder} import io.circe.{Decoder, Encoder} diff --git a/modules/store/src/main/scala/db/migration/h2/V1_29_2__MigrateNotifyTask.scala b/modules/store/src/main/scala/db/migration/h2/V1_29_2__MigrateNotifyTask.scala index 386a4d4d..af92054b 100644 --- a/modules/store/src/main/scala/db/migration/h2/V1_29_2__MigrateNotifyTask.scala +++ b/modules/store/src/main/scala/db/migration/h2/V1_29_2__MigrateNotifyTask.scala @@ -7,6 +7,7 @@ package db.migration.h2 import cats.effect.unsafe.implicits._ + import db.migration.common.MigrateNotifyTasks import org.flywaydb.core.api.migration.BaseJavaMigration import org.flywaydb.core.api.migration.Context diff --git a/modules/store/src/main/scala/db/migration/h2/V1_32_2__MigrateChannels.scala b/modules/store/src/main/scala/db/migration/h2/V1_32_2__MigrateChannels.scala index 7d470987..b91ba9ff 100644 --- a/modules/store/src/main/scala/db/migration/h2/V1_32_2__MigrateChannels.scala +++ b/modules/store/src/main/scala/db/migration/h2/V1_32_2__MigrateChannels.scala @@ -7,6 +7,7 @@ package db.migration.h2 import cats.effect.unsafe.implicits._ + import db.migration.common.MigrateDueItemTasks import org.flywaydb.core.api.migration.{BaseJavaMigration, Context} diff --git a/modules/store/src/main/scala/db/migration/h2/V1_39_2__MigrateTasks.scala b/modules/store/src/main/scala/db/migration/h2/V1_39_2__MigrateTasks.scala index 0d4f09e0..47489d48 100644 --- a/modules/store/src/main/scala/db/migration/h2/V1_39_2__MigrateTasks.scala +++ b/modules/store/src/main/scala/db/migration/h2/V1_39_2__MigrateTasks.scala @@ -7,6 +7,7 @@ package db.migration.h2 import 
cats.effect.unsafe.implicits._ + import db.migration.common.MigrateCollectiveIdTaskArgs import org.flywaydb.core.api.migration.{BaseJavaMigration, Context} diff --git a/modules/store/src/main/scala/db/migration/mariadb/V1_29_2__MigrateNotifyTask.scala b/modules/store/src/main/scala/db/migration/mariadb/V1_29_2__MigrateNotifyTask.scala index bcdaa170..6f08c084 100644 --- a/modules/store/src/main/scala/db/migration/mariadb/V1_29_2__MigrateNotifyTask.scala +++ b/modules/store/src/main/scala/db/migration/mariadb/V1_29_2__MigrateNotifyTask.scala @@ -7,6 +7,7 @@ package db.migration.mariadb import cats.effect.unsafe.implicits._ + import db.migration.common.MigrateNotifyTasks import org.flywaydb.core.api.migration.BaseJavaMigration import org.flywaydb.core.api.migration.Context diff --git a/modules/store/src/main/scala/db/migration/mariadb/V1_32_2__MigrateChannels.scala b/modules/store/src/main/scala/db/migration/mariadb/V1_32_2__MigrateChannels.scala index 0efa9dd1..805ac208 100644 --- a/modules/store/src/main/scala/db/migration/mariadb/V1_32_2__MigrateChannels.scala +++ b/modules/store/src/main/scala/db/migration/mariadb/V1_32_2__MigrateChannels.scala @@ -7,6 +7,7 @@ package db.migration.mariadb import cats.effect.unsafe.implicits._ + import db.migration.common.MigrateDueItemTasks import org.flywaydb.core.api.migration.{BaseJavaMigration, Context} diff --git a/modules/store/src/main/scala/db/migration/mariadb/V1_39_2__MigrateTasks.scala b/modules/store/src/main/scala/db/migration/mariadb/V1_39_2__MigrateTasks.scala index d301dd55..bbc3b39b 100644 --- a/modules/store/src/main/scala/db/migration/mariadb/V1_39_2__MigrateTasks.scala +++ b/modules/store/src/main/scala/db/migration/mariadb/V1_39_2__MigrateTasks.scala @@ -7,6 +7,7 @@ package db.migration.mariadb import cats.effect.unsafe.implicits._ + import db.migration.common.MigrateCollectiveIdTaskArgs import org.flywaydb.core.api.migration.{BaseJavaMigration, Context} diff --git 
a/modules/store/src/main/scala/db/migration/postgresql/V1_29_2__MigrateNotifyTask.scala b/modules/store/src/main/scala/db/migration/postgresql/V1_29_2__MigrateNotifyTask.scala index 8bb2e132..2808a8f8 100644 --- a/modules/store/src/main/scala/db/migration/postgresql/V1_29_2__MigrateNotifyTask.scala +++ b/modules/store/src/main/scala/db/migration/postgresql/V1_29_2__MigrateNotifyTask.scala @@ -7,6 +7,7 @@ package db.migration.postgresql import cats.effect.unsafe.implicits._ + import db.migration.common.MigrateNotifyTasks import org.flywaydb.core.api.migration.BaseJavaMigration import org.flywaydb.core.api.migration.Context diff --git a/modules/store/src/main/scala/db/migration/postgresql/V1_32_2__MigrateChannels.scala b/modules/store/src/main/scala/db/migration/postgresql/V1_32_2__MigrateChannels.scala index 62703333..93bdad55 100644 --- a/modules/store/src/main/scala/db/migration/postgresql/V1_32_2__MigrateChannels.scala +++ b/modules/store/src/main/scala/db/migration/postgresql/V1_32_2__MigrateChannels.scala @@ -7,6 +7,7 @@ package db.migration.postgresql import cats.effect.unsafe.implicits._ + import db.migration.common.MigrateDueItemTasks import org.flywaydb.core.api.migration.{BaseJavaMigration, Context} diff --git a/modules/store/src/main/scala/db/migration/postgresql/V1_39_2__MigrateTasks.scala b/modules/store/src/main/scala/db/migration/postgresql/V1_39_2__MigrateTasks.scala index 8acd6336..da7df2cd 100644 --- a/modules/store/src/main/scala/db/migration/postgresql/V1_39_2__MigrateTasks.scala +++ b/modules/store/src/main/scala/db/migration/postgresql/V1_39_2__MigrateTasks.scala @@ -7,6 +7,7 @@ package db.migration.postgresql import cats.effect.unsafe.implicits._ + import db.migration.common.MigrateCollectiveIdTaskArgs import org.flywaydb.core.api.migration.{BaseJavaMigration, Context} diff --git a/modules/store/src/main/scala/docspell/store/records/RPeriodicTask.scala b/modules/store/src/main/scala/docspell/store/records/RPeriodicTask.scala index 
b250d9d4..c775a184 100644 --- a/modules/store/src/main/scala/docspell/store/records/RPeriodicTask.scala +++ b/modules/store/src/main/scala/docspell/store/records/RPeriodicTask.scala @@ -9,9 +9,11 @@ package docspell.store.records import cats.data.NonEmptyList import cats.effect._ import cats.implicits._ + import docspell.common._ import docspell.store.qb.DSL._ import docspell.store.qb._ + import com.github.eikek.calev.CalEvent import doobie._ import doobie.implicits._ diff --git a/modules/store/src/test/scala/docspell/store/fts/TempFtsOpsTest.scala b/modules/store/src/test/scala/docspell/store/fts/TempFtsOpsTest.scala index 03b37fe8..fdb06b2b 100644 --- a/modules/store/src/test/scala/docspell/store/fts/TempFtsOpsTest.scala +++ b/modules/store/src/test/scala/docspell/store/fts/TempFtsOpsTest.scala @@ -7,10 +7,12 @@ package docspell.store.fts import java.time.{Instant, LocalDate} + import cats.effect.IO import cats.syntax.option._ import cats.syntax.traverse._ import fs2.Stream + import docspell.common._ import docspell.ftsclient.FtsResult import docspell.ftsclient.FtsResult.{AttachmentData, ItemMatch} @@ -19,6 +21,7 @@ import docspell.store.qb.DSL._ import docspell.store.qb._ import docspell.store.queries.{QItem, QLogin, Query} import docspell.store.records.{RCollective, RItem, RUser} + import doobie._ class TempFtsOpsTest extends DatabaseTest { diff --git a/modules/store/src/test/scala/docspell/store/migrate/MigrateTest.scala b/modules/store/src/test/scala/docspell/store/migrate/MigrateTest.scala index 0594578a..cb34fb18 100644 --- a/modules/store/src/test/scala/docspell/store/migrate/MigrateTest.scala +++ b/modules/store/src/test/scala/docspell/store/migrate/MigrateTest.scala @@ -7,7 +7,9 @@ package docspell.store.migrate import cats.effect._ + import docspell.store.{DatabaseTest, SchemaMigrateConfig, StoreFixture} + import org.flywaydb.core.api.output.MigrateResult class MigrateTest extends DatabaseTest { From 0600a263b28954aaf3a85642e25396da71a5632f Mon Sep 17 
00:00:00 2001 From: eikek Date: Sun, 7 Aug 2022 16:44:44 +0200 Subject: [PATCH 14/15] Fix compile errors in website examples --- website/src/main/scala/docspell/website/Helper.scala | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/website/src/main/scala/docspell/website/Helper.scala b/website/src/main/scala/docspell/website/Helper.scala index 82314d72..4015069d 100644 --- a/website/src/main/scala/docspell/website/Helper.scala +++ b/website/src/main/scala/docspell/website/Helper.scala @@ -1,6 +1,6 @@ package docspell.website -import docspell.common.{IdRef, Ident, Timestamp} +import docspell.common.{CollectiveId, IdRef, Ident, Timestamp} import scodec.bits.ByteVector import java.time.LocalDate @@ -12,7 +12,7 @@ trait Helper { val date20220514 = Timestamp.atUtc(LocalDate.of(2022, 5, 14).atTime(11, 22, 12)) - val cid = id("collective") + val cid = CollectiveId(1) implicit final class StringExt(self: String) { def id: Ident = Ident.unsafe(self) From 3a0a154388755ec713c91c38e04bc974876c3042 Mon Sep 17 00:00:00 2001 From: eikek Date: Sun, 7 Aug 2022 17:34:01 +0200 Subject: [PATCH 15/15] Remove unavailable website links --- website/site/content/docs/dev/adr/0007_convert_html_files.md | 5 +++-- 1 file changed, 3 insertions(+), 2 deletions(-) diff --git a/website/site/content/docs/dev/adr/0007_convert_html_files.md b/website/site/content/docs/dev/adr/0007_convert_html_files.md index 164505d3..20b0d736 100644 --- a/website/site/content/docs/dev/adr/0007_convert_html_files.md +++ b/website/site/content/docs/dev/adr/0007_convert_html_files.md @@ -24,8 +24,9 @@ Native (firefox) view: {{ figure(file="example-html-native.jpg") }} -Note: the example html is from -[here](https://www.sparksuite.com/open-source/invoice.html). +Note: the example html was taken from +`https://www.sparksuite.com/open-source/invoice.html` (not available +anymore). I downloaded the HTML file to disk together with its resources (using *Save as...* in the browser).