From 057ce29676a573466a84943eb6ae3a54a766b976 Mon Sep 17 00:00:00 2001 From: Yichi Yang Date: Sun, 25 Aug 2024 12:20:24 +0800 Subject: [PATCH 01/32] Refactor: Reduce number of SQL queries when serializing List[Document] (#7505) --- src/documents/serialisers.py | 116 +++++++++++++++++++++++++++-------- 1 file changed, 91 insertions(+), 25 deletions(-) diff --git a/src/documents/serialisers.py b/src/documents/serialisers.py index 0c0813aa4..747d744b6 100644 --- a/src/documents/serialisers.py +++ b/src/documents/serialisers.py @@ -2,6 +2,7 @@ import datetime import math import re import zoneinfo +from collections.abc import Iterable from decimal import Decimal import magic @@ -232,22 +233,45 @@ class OwnedObjectSerializer( ) ) - def get_is_shared_by_requester(self, obj: Document): - ctype = ContentType.objects.get_for_model(obj) + @staticmethod + def get_shared_object_pks(objects: Iterable): + """ + Return the primary keys of the subset of objects that are shared. + """ + try: + first_obj = next(iter(objects)) + except StopIteration: + return set() + + ctype = ContentType.objects.get_for_model(first_obj) + object_pks = list(obj.pk for obj in objects) + pk_type = type(first_obj.pk) + + def get_pks_for_permission_type(model): + return map( + pk_type, # coerce the pk to be the same type of the provided objects + model.objects.filter( + content_type=ctype, + object_pk__in=object_pks, + ) + .values_list("object_pk", flat=True) + .distinct(), + ) + UserObjectPermission = get_user_obj_perms_model() GroupObjectPermission = get_group_obj_perms_model() - return obj.owner == self.user and ( - UserObjectPermission.objects.filter( - content_type=ctype, - object_pk=obj.pk, - ).count() - > 0 - or GroupObjectPermission.objects.filter( - content_type=ctype, - object_pk=obj.pk, - ).count() - > 0 - ) + user_permission_pks = get_pks_for_permission_type(UserObjectPermission) + group_permission_pks = get_pks_for_permission_type(GroupObjectPermission) + + return set(user_permission_pks) | set(group_permission_pks) + + def get_is_shared_by_requester(self, obj: Document): + # First check the context to see if `shared_object_pks` is set by the parent. + shared_object_pks = self.context.get("shared_object_pks") + # If not just check if the current object is shared. + if shared_object_pks is None: + shared_object_pks = self.get_shared_object_pks([obj]) + return obj.owner == self.user and obj.id in shared_object_pks permissions = SerializerMethodField(read_only=True) user_can_change = SerializerMethodField(read_only=True) @@ -303,6 +327,14 @@ class OwnedObjectSerializer( return super().update(instance, validated_data) +class OwnedObjectListSerializer(serializers.ListSerializer): + def to_representation(self, documents): + self.child.context["shared_object_pks"] = self.child.get_shared_object_pks( + documents, + ) + return super().to_representation(documents) + + class CorrespondentSerializer(MatchingModelSerializer, OwnedObjectSerializer): last_correspondence = serializers.DateTimeField(read_only=True, required=False) @@ -863,35 +895,69 @@ class DocumentSerializer( "custom_fields", "remove_inbox_tags", ) + list_serializer_class = OwnedObjectListSerializer + + +class SearchResultListSerializer(serializers.ListSerializer): + def to_representation(self, hits): + document_ids = [hit["id"] for hit in hits] + # Fetch all Document objects in the list in one SQL query. + documents = self.child.fetch_documents(document_ids) + self.child.context["documents"] = documents + # Also check if they are shared with other users / groups. 
+ self.child.context["shared_object_pks"] = self.child.get_shared_object_pks( + documents.values(), + ) + + return super().to_representation(hits) class SearchResultSerializer(DocumentSerializer): - def to_representation(self, instance): - doc = ( - Document.objects.select_related( + @staticmethod + def fetch_documents(ids): + """ + Return a dict that maps given document IDs to Document objects. + """ + return { + document.id: document + for document in Document.objects.select_related( "correspondent", "storage_path", "document_type", "owner", ) .prefetch_related("tags", "custom_fields", "notes") - .get(id=instance["id"]) - ) + .filter(id__in=ids) + } + + def to_representation(self, hit): + # Again we first check if the parent has already fetched the documents. + documents = self.context.get("documents") + # Otherwise we fetch this document. + if documents is None: # pragma: no cover + # In practice we only serialize **lists** of whoosh.searching.Hit. + # I'm keeping this check for completeness but marking it no cover for now. + documents = self.fetch_documents([hit["id"]]) + document = documents[hit["id"]] + notes = ",".join( - [str(c.note) for c in doc.notes.all()], + [str(c.note) for c in document.notes.all()], ) - r = super().to_representation(doc) + r = super().to_representation(document) r["__search_hit__"] = { - "score": instance.score, - "highlights": instance.highlights("content", text=doc.content), + "score": hit.score, + "highlights": hit.highlights("content", text=document.content), "note_highlights": ( - instance.highlights("notes", text=notes) if doc else None + hit.highlights("notes", text=notes) if document else None ), - "rank": instance.rank, + "rank": hit.rank, } return r + class Meta(DocumentSerializer.Meta): + list_serializer_class = SearchResultListSerializer + class SavedViewFilterRuleSerializer(serializers.ModelSerializer): class Meta: From a0c227fe55400a3cf09ca51492cd18d69302cf6f Mon Sep 17 00:00:00 2001 From: Yichi Yang Date: Sun, 25 Aug 2024 12:20:43 +0800 Subject: [PATCH 02/32] Refactor: Use django-filter logic for filtering full text search queries (#7507) --- src/documents/index.py | 147 ++++++--------------- src/documents/tests/test_api_search.py | 36 ++++- src/documents/tests/test_delayedquery.py | 161 ----------------------- src/documents/views.py | 32 +++-- 4 files changed, 86 insertions(+), 290 deletions(-) diff --git a/src/documents/index.py b/src/documents/index.py index 98c43d1e8..d95a80213 100644 --- a/src/documents/index.py +++ b/src/documents/index.py @@ -8,8 +8,8 @@ from datetime import timezone from shutil import rmtree from typing import Optional -from dateutil.parser import isoparse from django.conf import settings +from django.db.models import QuerySet from django.utils import timezone as django_timezone from guardian.shortcuts import get_users_with_perms from whoosh import classify @@ -22,6 +22,8 @@ from whoosh.fields import NUMERIC from whoosh.fields import TEXT from whoosh.fields import Schema from whoosh.highlight import HtmlFormatter +from whoosh.idsets import BitSet +from whoosh.idsets import DocIdSet from whoosh.index import FileIndex from whoosh.index import create_in from whoosh.index import exists_in @@ -31,6 +33,7 @@ from whoosh.qparser import QueryParser from whoosh.qparser.dateparse import DateParserPlugin from whoosh.qparser.dateparse import English from whoosh.qparser.plugins import FieldsPlugin +from whoosh.reading import IndexReader from whoosh.scoring import TF_IDF from whoosh.searching import ResultsPage from whoosh.searching 
import Searcher @@ -201,114 +204,32 @@ def remove_document_from_index(document: Document): remove_document(writer, document) +class MappedDocIdSet(DocIdSet): + """ + A DocIdSet backed by a set of `Document` IDs. + Supports efficiently looking up if a whoosh docnum is in the provided `filter_queryset`. + """ + + def __init__(self, filter_queryset: QuerySet, ixreader: IndexReader) -> None: + super().__init__() + document_ids = filter_queryset.order_by("id").values_list("id", flat=True) + max_id = document_ids.last() or 0 + self.document_ids = BitSet(document_ids, size=max_id) + self.ixreader = ixreader + + def __contains__(self, docnum): + document_id = self.ixreader.stored_fields(docnum)["id"] + return document_id in self.document_ids + + def __bool__(self): + # searcher.search ignores a filter if it's "falsy". + # We use this hack so this DocIdSet, when used as a filter, is never ignored. + return True + + class DelayedQuery: - param_map = { - "correspondent": ("correspondent", ["id", "id__in", "id__none", "isnull"]), - "document_type": ("type", ["id", "id__in", "id__none", "isnull"]), - "storage_path": ("path", ["id", "id__in", "id__none", "isnull"]), - "owner": ("owner", ["id", "id__in", "id__none", "isnull"]), - "shared_by": ("shared_by", ["id"]), - "tags": ("tag", ["id__all", "id__in", "id__none"]), - "added": ("added", ["date__lt", "date__gt"]), - "created": ("created", ["date__lt", "date__gt"]), - "checksum": ("checksum", ["icontains", "istartswith"]), - "original_filename": ("original_filename", ["icontains", "istartswith"]), - "custom_fields": ( - "custom_fields", - ["icontains", "istartswith", "id__all", "id__in", "id__none"], - ), - } - def _get_query(self): - raise NotImplementedError - - def _get_query_filter(self): - criterias = [] - for key, value in self.query_params.items(): - # is_tagged is a special case - if key == "is_tagged": - criterias.append(query.Term("has_tag", self.evalBoolean(value))) - continue - - if key == "has_custom_fields": - criterias.append( - query.Term("has_custom_fields", self.evalBoolean(value)), - ) - continue - - # Don't process query params without a filter - if "__" not in key: - continue - - # All other query params consist of a parameter and a query filter - param, query_filter = key.split("__", 1) - try: - field, supported_query_filters = self.param_map[param] - except KeyError: - logger.error(f"Unable to build a query filter for parameter {key}") - continue - - # We only support certain filters per parameter - if query_filter not in supported_query_filters: - logger.info( - f"Query filter {query_filter} not supported for parameter {param}", - ) - continue - - if query_filter == "id": - if param == "shared_by": - criterias.append(query.Term("is_shared", True)) - criterias.append(query.Term("owner_id", value)) - else: - criterias.append(query.Term(f"{field}_id", value)) - elif query_filter == "id__in": - in_filter = [] - for object_id in value.split(","): - in_filter.append( - query.Term(f"{field}_id", object_id), - ) - criterias.append(query.Or(in_filter)) - elif query_filter == "id__none": - for object_id in value.split(","): - criterias.append( - query.Not(query.Term(f"{field}_id", object_id)), - ) - elif query_filter == "isnull": - criterias.append( - query.Term(f"has_{field}", self.evalBoolean(value) is False), - ) - elif query_filter == "id__all": - for object_id in value.split(","): - criterias.append(query.Term(f"{field}_id", object_id)) - elif query_filter == "date__lt": - criterias.append( - query.DateRange(field, start=None, 
end=isoparse(value)), - ) - elif query_filter == "date__gt": - criterias.append( - query.DateRange(field, start=isoparse(value), end=None), - ) - elif query_filter == "icontains": - criterias.append( - query.Term(field, value), - ) - elif query_filter == "istartswith": - criterias.append( - query.Prefix(field, value), - ) - - user_criterias = get_permissions_criterias( - user=self.user, - ) - if len(criterias) > 0: - if len(user_criterias) > 0: - criterias.append(query.Or(user_criterias)) - return query.And(criterias) - else: - return query.Or(user_criterias) if len(user_criterias) > 0 else None - - def evalBoolean(self, val): - return val.lower() in {"true", "1"} + raise NotImplementedError # pragma: no cover def _get_query_sortedby(self): if "ordering" not in self.query_params: @@ -339,13 +260,19 @@ class DelayedQuery: else: return sort_fields_map[field], reverse - def __init__(self, searcher: Searcher, query_params, page_size, user): + def __init__( + self, + searcher: Searcher, + query_params, + page_size, + filter_queryset: QuerySet, + ): self.searcher = searcher self.query_params = query_params self.page_size = page_size self.saved_results = dict() self.first_score = None - self.user = user + self.filter_queryset = filter_queryset def __len__(self): page = self[0:1] @@ -361,7 +288,7 @@ class DelayedQuery: page: ResultsPage = self.searcher.search_page( q, mask=mask, - filter=self._get_query_filter(), + filter=MappedDocIdSet(self.filter_queryset, self.searcher.ixreader), pagenum=math.floor(item.start / self.page_size) + 1, pagelen=self.page_size, sortedby=sortedby, diff --git a/src/documents/tests/test_api_search.py b/src/documents/tests/test_api_search.py index c10d6c1bb..e524e7b91 100644 --- a/src/documents/tests/test_api_search.py +++ b/src/documents/tests/test_api_search.py @@ -15,6 +15,7 @@ from rest_framework.test import APITestCase from whoosh.writing import AsyncWriter from documents import index +from documents.bulk_edit import set_permissions from documents.models import Correspondent from documents.models import CustomField from documents.models import CustomFieldInstance @@ -1159,7 +1160,8 @@ class TestDocumentSearchApi(DirectoriesMixin, APITestCase): [d3.id, d2.id, d1.id], ) - def test_global_search(self): + @mock.patch("documents.bulk_edit.bulk_update_documents") + def test_global_search(self, m): """ GIVEN: - Multiple documents and objects @@ -1186,11 +1188,38 @@ class TestDocumentSearchApi(DirectoriesMixin, APITestCase): checksum="C", pk=3, ) + # The below two documents are owned by user2 and shouldn't show up in results! 
+ d4 = Document.objects.create( + title="doc 4 owned by user2", + content="bank bank bank bank 4", + checksum="D", + pk=4, + ) + d5 = Document.objects.create( + title="doc 5 owned by user2", + content="bank bank bank bank 5", + checksum="E", + pk=5, + ) + + user1 = User.objects.create_user("bank user1") + user2 = User.objects.create_superuser("user2") + group1 = Group.objects.create(name="bank group1") + Group.objects.create(name="group2") + + user1.user_permissions.add( + *Permission.objects.filter(codename__startswith="view_").exclude( + content_type__app_label="admin", + ), + ) + set_permissions([4, 5], set_permissions=[], owner=user2, merge=False) with index.open_index_writer() as writer: index.update_document(writer, d1) index.update_document(writer, d2) index.update_document(writer, d3) + index.update_document(writer, d4) + index.update_document(writer, d5) correspondent1 = Correspondent.objects.create(name="bank correspondent 1") Correspondent.objects.create(name="correspondent 2") @@ -1200,10 +1229,7 @@ class TestDocumentSearchApi(DirectoriesMixin, APITestCase): StoragePath.objects.create(name="path 2", path="path2") tag1 = Tag.objects.create(name="bank tag1") Tag.objects.create(name="tag2") - user1 = User.objects.create_superuser("bank user1") - User.objects.create_user("user2") - group1 = Group.objects.create(name="bank group1") - Group.objects.create(name="group2") + SavedView.objects.create( name="bank view", show_on_dashboard=True, diff --git a/src/documents/tests/test_delayedquery.py b/src/documents/tests/test_delayedquery.py index b0dfc2ed2..1895bd6c6 100644 --- a/src/documents/tests/test_delayedquery.py +++ b/src/documents/tests/test_delayedquery.py @@ -1,8 +1,6 @@ -from dateutil.parser import isoparse from django.test import TestCase from whoosh import query -from documents.index import DelayedQuery from documents.index import get_permissions_criterias from documents.models import User @@ -58,162 +56,3 @@ class TestDelayedQuery(TestCase): ) for user, expected in tests: self.assertEqual(get_permissions_criterias(user), expected) - - def test_no_query_filters(self): - dq = DelayedQuery(None, {}, None, None) - self.assertEqual(dq._get_query_filter(), self.has_no_owner) - - def test_date_query_filters(self): - def _get_testset(param: str): - date_str = "1970-01-01T02:44" - date_obj = isoparse(date_str) - return ( - ( - {f"{param}__date__lt": date_str}, - query.And( - [ - query.DateRange(param, start=None, end=date_obj), - self.has_no_owner, - ], - ), - ), - ( - {f"{param}__date__gt": date_str}, - query.And( - [ - query.DateRange(param, start=date_obj, end=None), - self.has_no_owner, - ], - ), - ), - ) - - query_params = ["created", "added"] - for param in query_params: - for params, expected in _get_testset(param): - dq = DelayedQuery(None, params, None, None) - got = dq._get_query_filter() - self.assertCountEqual(got, expected) - - def test_is_tagged_query_filter(self): - tests = ( - ("True", True), - ("true", True), - ("1", True), - ("False", False), - ("false", False), - ("0", False), - ("foo", False), - ) - for param, expected in tests: - dq = DelayedQuery(None, {"is_tagged": param}, None, None) - self.assertEqual( - dq._get_query_filter(), - query.And([query.Term("has_tag", expected), self.has_no_owner]), - ) - - def test_tags_query_filters(self): - # tests contains tuples of query_parameter dics and the expected whoosh query - param = "tags" - field, _ = DelayedQuery.param_map[param] - tests = ( - ( - {f"{param}__id__all": "42,43"}, - query.And( - [ - 
query.Term(f"{field}_id", "42"), - query.Term(f"{field}_id", "43"), - self.has_no_owner, - ], - ), - ), - # tags does not allow __id - ( - {f"{param}__id": "42"}, - self.has_no_owner, - ), - # tags does not allow __isnull - ( - {f"{param}__isnull": "true"}, - self.has_no_owner, - ), - self._get_testset__id__in(param, field), - self._get_testset__id__none(param, field), - ) - - for params, expected in tests: - dq = DelayedQuery(None, params, None, None) - got = dq._get_query_filter() - self.assertCountEqual(got, expected) - - def test_generic_query_filters(self): - def _get_testset(param: str): - field, _ = DelayedQuery.param_map[param] - return ( - ( - {f"{param}__id": "42"}, - query.And( - [ - query.Term(f"{field}_id", "42"), - self.has_no_owner, - ], - ), - ), - self._get_testset__id__in(param, field), - self._get_testset__id__none(param, field), - ( - {f"{param}__isnull": "true"}, - query.And( - [ - query.Term(f"has_{field}", False), - self.has_no_owner, - ], - ), - ), - ( - {f"{param}__isnull": "false"}, - query.And( - [ - query.Term(f"has_{field}", True), - self.has_no_owner, - ], - ), - ), - ) - - query_params = ["correspondent", "document_type", "storage_path", "owner"] - for param in query_params: - for params, expected in _get_testset(param): - dq = DelayedQuery(None, params, None, None) - got = dq._get_query_filter() - self.assertCountEqual(got, expected) - - def test_char_query_filter(self): - def _get_testset(param: str): - return ( - ( - {f"{param}__icontains": "foo"}, - query.And( - [ - query.Term(f"{param}", "foo"), - self.has_no_owner, - ], - ), - ), - ( - {f"{param}__istartswith": "foo"}, - query.And( - [ - query.Prefix(f"{param}", "foo"), - self.has_no_owner, - ], - ), - ), - ) - - query_params = ["checksum", "original_filename"] - for param in query_params: - for params, expected in _get_testset(param): - dq = DelayedQuery(None, params, None, None) - got = dq._get_query_filter() - self.assertCountEqual(got, expected) diff --git a/src/documents/views.py b/src/documents/views.py index df54546e1..c0ceef4a3 100644 --- a/src/documents/views.py +++ b/src/documents/views.py @@ -852,6 +852,8 @@ class UnifiedSearchViewSet(DocumentViewSet): ) def filter_queryset(self, queryset): + filtered_queryset = super().filter_queryset(queryset) + if self._is_search_request(): from documents import index @@ -866,10 +868,10 @@ class UnifiedSearchViewSet(DocumentViewSet): self.searcher, self.request.query_params, self.paginator.get_page_size(self.request), - self.request.user, + filter_queryset=filtered_queryset, ) else: - return super().filter_queryset(queryset) + return filtered_queryset def list(self, request, *args, **kwargs): if self._is_search_request(): @@ -1199,14 +1201,16 @@ class GlobalSearchView(PassUserMixin): from documents import index with index.open_index_searcher() as s: - q, _ = index.DelayedFullTextQuery( + fts_query = index.DelayedFullTextQuery( s, request.query_params, - 10, - request.user, - )._get_query() - results = s.search(q, limit=OBJECT_LIMIT) - docs = docs | all_docs.filter(id__in=[r["id"] for r in results]) + OBJECT_LIMIT, + filter_queryset=all_docs, + ) + results = fts_query[0:1] + docs = docs | Document.objects.filter( + id__in=[r["id"] for r in results], + ) docs = docs[:OBJECT_LIMIT] saved_views = ( SavedView.objects.filter(owner=request.user, name__icontains=query) @@ -1452,12 +1456,12 @@ class StatisticsView(APIView): { "documents_total": documents_total, "documents_inbox": documents_inbox, - "inbox_tag": inbox_tags.first().pk - if inbox_tags.exists() - else 
None, # backwards compatibility - "inbox_tags": [tag.pk for tag in inbox_tags] - if inbox_tags.exists() - else None, + "inbox_tag": ( + inbox_tags.first().pk if inbox_tags.exists() else None + ), # backwards compatibility + "inbox_tags": ( + [tag.pk for tag in inbox_tags] if inbox_tags.exists() else None + ), "document_file_type_counts": document_file_type_counts, "character_count": character_count, "tag_count": len(tags), From 35de04a2ce5e33778468c6172b17e1f371c0ef03 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Tue, 27 Aug 2024 00:53:37 +0000 Subject: [PATCH 03/32] Chore(deps-dev): Bump the development group with 2 updates (#7545) * Chore(deps-dev): Bump the development group with 2 updates Bumps the development group with 2 updates: [ruff](https://github.com/astral-sh/ruff) and [mkdocs-material](https://github.com/squidfunk/mkdocs-material). Updates `ruff` from 0.6.1 to 0.6.2 - [Release notes](https://github.com/astral-sh/ruff/releases) - [Changelog](https://github.com/astral-sh/ruff/blob/main/CHANGELOG.md) - [Commits](https://github.com/astral-sh/ruff/compare/0.6.1...0.6.2) Updates `mkdocs-material` from 9.5.32 to 9.5.33 - [Release notes](https://github.com/squidfunk/mkdocs-material/releases) - [Changelog](https://github.com/squidfunk/mkdocs-material/blob/master/CHANGELOG) - [Commits](https://github.com/squidfunk/mkdocs-material/compare/9.5.32...9.5.33) --- updated-dependencies: - dependency-name: ruff dependency-type: direct:development update-type: version-update:semver-patch dependency-group: development - dependency-name: mkdocs-material dependency-type: direct:development update-type: version-update:semver-patch dependency-group: development ... Signed-off-by: dependabot[bot] * Update .pre-commit-config.yaml --------- Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> Co-authored-by: shamoon <4887959+shamoon@users.noreply.github.com> --- .pre-commit-config.yaml | 2 +- Pipfile.lock | 57 +++++++++++++++++++++-------------------- 2 files changed, 30 insertions(+), 29 deletions(-) diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml index 53c196132..8ebeeef16 100644 --- a/.pre-commit-config.yaml +++ b/.pre-commit-config.yaml @@ -47,7 +47,7 @@ repos: exclude: "(^Pipfile\\.lock$)" # Python hooks - repo: https://github.com/astral-sh/ruff-pre-commit - rev: 'v0.6.1' + rev: 'v0.6.2' hooks: - id: ruff - id: ruff-format diff --git a/Pipfile.lock b/Pipfile.lock index 10f623b86..a59a749b7 100644 --- a/Pipfile.lock +++ b/Pipfile.lock @@ -2962,11 +2962,11 @@ }, "idna": { "hashes": [ - "sha256:028ff3aadf0609c1fd278d8ea3089299412a7a8b9bd005dd08b9f8285bcb5cfc", - "sha256:82fee1fc78add43492d3a1898bfa6d8a904cc97d8427f683ed8e798d07761aa0" + "sha256:050b4e5baadcd44d760cedbd2b8e639f2ff89bbc7a5730fcc662954303377aac", + "sha256:d838c2c0ed6fced7693d5e8ab8e734d5f8fda53a039c0164afb0b82e771e3603" ], - "markers": "python_version >= '3.5'", - "version": "==3.7" + "markers": "python_version >= '3.6'", + "version": "==3.8" }, "imagehash": { "hashes": [ @@ -3115,12 +3115,12 @@ }, "mkdocs-material": { "hashes": [ - "sha256:38ed66e6d6768dde4edde022554553e48b2db0d26d1320b19e2e2b9da0be1120", - "sha256:f3704f46b63d31b3cd35c0055a72280bed825786eccaf19c655b44e0cd2c6b3f" + "sha256:d23a8b5e3243c9b2f29cdfe83051104a8024b767312dc8fde05ebe91ad55d89d", + "sha256:dbc79cf0fdc6e2c366aa987de8b0c9d4e2bb9f156e7466786ba2fd0f9bf7ffca" ], "index": "pypi", "markers": "python_version >= '3.8'", - "version": 
"==9.5.32" + "version": "==9.5.33" }, "mkdocs-material-extensions": { "hashes": [ @@ -3199,9 +3199,10 @@ }, "paginate": { "hashes": [ - "sha256:5e6007b6a9398177a7e1648d04fdd9f8c9766a1a945bceac82f1929e8c78af2d" + "sha256:22bd083ab41e1a8b4f3690544afb2c60c25e5c9a63a30fa2f483f6c60c8e5945", + "sha256:b885e2af73abcf01d9559fd5216b57ef722f8c42affbb63942377668e35c7591" ], - "version": "==0.5.6" + "version": "==0.5.7" }, "pathspec": { "hashes": [ @@ -3658,28 +3659,28 @@ }, "ruff": { "hashes": [ - "sha256:2c7477c3b9da822e2db0b4e0b59e61b8a23e87886e727b327e7dcaf06213c5cf", - "sha256:392688dbb50fecf1bf7126731c90c11a9df1c3a4cdc3f481b53e851da5634fa5", - "sha256:3a0af7ab3f86e3dc9f157a928e08e26c4b40707d0612b01cd577cc84b8905cc9", - "sha256:3bc81074971b0ffad1bd0c52284b22411f02a11a012082a76ac6da153536e014", - "sha256:45efaae53b360c81043e311cdec8a7696420b3d3e8935202c2846e7a97d4edae", - "sha256:5278d3e095ccc8c30430bcc9bc550f778790acc211865520f3041910a28d0024", - "sha256:99d7ae0df47c62729d58765c593ea54c2546d5de213f2af2a19442d50a10cec9", - "sha256:9eb18dfd7b613eec000e3738b3f0e4398bf0153cb80bfa3e351b3c1c2f6d7b15", - "sha256:9fb4c4e8b83f19c9477a8745e56d2eeef07a7ff50b68a6998f7d9e2e3887bdc4", - "sha256:af3ffd8c6563acb8848d33cd19a69b9bfe943667f0419ca083f8ebe4224a3436", - "sha256:b2e0dd11e2ae553ee5c92a81731d88a9883af8db7408db47fc81887c1f8b672e", - "sha256:b4bb7de6a24169dc023f992718a9417380301b0c2da0fe85919f47264fb8add9", - "sha256:bc60c7d71b732c8fa73cf995efc0c836a2fd8b9810e115be8babb24ae87e0850", - "sha256:c2ebfc8f51ef4aca05dad4552bbcf6fe8d1f75b2f6af546cc47cc1c1ca916b5b", - "sha256:c62bc04c6723a81e25e71715aa59489f15034d69bf641df88cb38bdc32fd1dbb", - "sha256:d812615525a34ecfc07fd93f906ef5b93656be01dfae9a819e31caa6cfe758a1", - "sha256:faaa4060f4064c3b7aaaa27328080c932fa142786f8142aff095b42b6a2eb631", - "sha256:fe6d5f65d6f276ee7a0fc50a0cecaccb362d30ef98a110f99cac1c7872df2f18" + "sha256:094600ee88cda325988d3f54e3588c46de5c18dae09d683ace278b11f9d4d534", + "sha256:1175d39faadd9a50718f478d23bfc1d4da5743f1ab56af81a2b6caf0a2394f23", + "sha256:17002fe241e76544448a8e1e6118abecbe8cd10cf68fde635dad480dba594570", + "sha256:239ee6beb9e91feb8e0ec384204a763f36cb53fb895a1a364618c6abb076b3be", + "sha256:279d5f7d86696df5f9549b56b9b6a7f6c72961b619022b5b7999b15db392a4da", + "sha256:2aed7e243be68487aa8982e91c6e260982d00da3f38955873aecd5a9204b1d66", + "sha256:316d418fe258c036ba05fbf7dfc1f7d3d4096db63431546163b472285668132b", + "sha256:3dbeac76ed13456f8158b8f4fe087bf87882e645c8e8b606dd17b0b66c2c1158", + "sha256:5b939f9c86d51635fe486585389f54582f0d65b8238e08c327c1534844b3bb9a", + "sha256:5c8cbc6252deb3ea840ad6a20b0f8583caab0c5ef4f9cca21adc5a92b8f79f3c", + "sha256:7438a7288f9d67ed3c8ce4d059e67f7ed65e9fe3aa2ab6f5b4b3610e57e3cb56", + "sha256:7db6880c53c56addb8638fe444818183385ec85eeada1d48fc5abe045301b2f1", + "sha256:a8f310d63af08f583363dfb844ba8f9417b558199c58a5999215082036d795a1", + "sha256:d0d62ca91219f906caf9b187dea50d17353f15ec9bb15aae4a606cd697b49b4c", + "sha256:d371f7fc9cec83497fe7cf5eaf5b76e22a8efce463de5f775a1826197feb9df8", + "sha256:d72b8b3abf8a2d51b7b9944a41307d2f442558ccb3859bbd87e6ae9be1694a5d", + "sha256:d9f3469c7dd43cd22eb1c3fc16926fb8258d50cb1b216658a07be95dd117b0f2", + "sha256:f28fcd2cd0e02bdf739297516d5643a945cc7caf09bd9bcb4d932540a5ea4fa9" ], "index": "pypi", "markers": "python_version >= '3.7'", - "version": "==0.6.1" + "version": "==0.6.2" }, "scipy": { "hashes": [ From 6a7274c414ac5ec83f9086e669da75854fd9d954 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Mon, 26 Aug 
2024 19:48:12 -0700 Subject: [PATCH 04/32] Chore(deps): Bump micromatch from 4.0.5 to 4.0.8 in /src-ui (#7551) Bumps [micromatch](https://github.com/micromatch/micromatch) from 4.0.5 to 4.0.8. - [Release notes](https://github.com/micromatch/micromatch/releases) - [Changelog](https://github.com/micromatch/micromatch/blob/master/CHANGELOG.md) - [Commits](https://github.com/micromatch/micromatch/compare/4.0.5...4.0.8) --- updated-dependencies: - dependency-name: micromatch dependency-type: indirect ... Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- src-ui/package-lock.json | 22 +++++++++++----------- 1 file changed, 11 insertions(+), 11 deletions(-) diff --git a/src-ui/package-lock.json b/src-ui/package-lock.json index 217417194..dd61e1097 100644 --- a/src-ui/package-lock.json +++ b/src-ui/package-lock.json @@ -7621,11 +7621,11 @@ } }, "node_modules/braces": { - "version": "3.0.2", - "resolved": "https://registry.npmjs.org/braces/-/braces-3.0.2.tgz", - "integrity": "sha512-b8um+L1RzM3WDSzvhm6gIz1yfTbBt6YTlcEKAvsmqCZZFw46z626lVj9j1yEPW33H5H+lBQpZMP1k8l+78Ha0A==", + "version": "3.0.3", + "resolved": "https://registry.npmjs.org/braces/-/braces-3.0.3.tgz", + "integrity": "sha512-yQbXgO/OSZVD2IsiLlro+7Hf6Q18EJrKSEsdoMzKePKXct3gvD8oLcOQdIzGupr5Fj+EDe8gO/lxc1BzfMpxvA==", "dependencies": { - "fill-range": "^7.0.1" + "fill-range": "^7.1.1" }, "engines": { "node": ">=8" @@ -10222,9 +10222,9 @@ } }, "node_modules/fill-range": { - "version": "7.0.1", - "resolved": "https://registry.npmjs.org/fill-range/-/fill-range-7.0.1.tgz", - "integrity": "sha512-qOo9F+dMUmC2Lcb4BbVvnKJxTPjCm+RRpe4gDuGrzkL7mEVl/djYSu2OdQ2Pa302N4oqkSg9ir6jaLWJ2USVpQ==", + "version": "7.1.1", + "resolved": "https://registry.npmjs.org/fill-range/-/fill-range-7.1.1.tgz", + "integrity": "sha512-YsGpe3WHLK8ZYi4tWDg2Jy3ebRz2rXowDxnld4bkQB00cc/1Zw9AWnC0i9ztDJitivtQvaI9KaLyKrc+hBW0yg==", "dependencies": { "to-regex-range": "^5.0.1" }, @@ -14331,11 +14331,11 @@ } }, "node_modules/micromatch": { - "version": "4.0.5", - "resolved": "https://registry.npmjs.org/micromatch/-/micromatch-4.0.5.tgz", - "integrity": "sha512-DMy+ERcEW2q8Z2Po+WNXuw3c5YaUSFjAO5GsJqfEl7UjvtIuFKO6ZrKvcItdy98dwFI2N1tg3zNIdKaQT+aNdA==", + "version": "4.0.8", + "resolved": "https://registry.npmjs.org/micromatch/-/micromatch-4.0.8.tgz", + "integrity": "sha512-PXwfBhYu0hBCPw8Dn0E+WDYb7af3dSLVWKi3HGv84IdF4TyFoC0ysxFd0Goxw7nSv4T/PzEJQxsYsEiFCKo2BA==", "dependencies": { - "braces": "^3.0.2", + "braces": "^3.0.3", "picomatch": "^2.3.1" }, "engines": { From e21552e053f23ad70e5263944550507e478d0f1a Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Tue, 27 Aug 2024 16:31:23 +0000 Subject: [PATCH 05/32] Chore(deps): Bump pathvalidate in the small-changes group (#7548) Bumps the small-changes group with 1 update: [pathvalidate](https://github.com/thombashi/pathvalidate). Updates `pathvalidate` from 3.2.0 to 3.2.1 - [Release notes](https://github.com/thombashi/pathvalidate/releases) - [Changelog](https://github.com/thombashi/pathvalidate/blob/master/CHANGELOG.md) - [Commits](https://github.com/thombashi/pathvalidate/compare/v3.2.0...v3.2.1) --- updated-dependencies: - dependency-name: pathvalidate dependency-type: direct:production update-type: version-update:semver-patch dependency-group: small-changes ... 
Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- Pipfile.lock | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/Pipfile.lock b/Pipfile.lock index a59a749b7..79039d3e8 100644 --- a/Pipfile.lock +++ b/Pipfile.lock @@ -1223,12 +1223,12 @@ }, "pathvalidate": { "hashes": [ - "sha256:5e8378cf6712bff67fbe7a8307d99fa8c1a0cb28aa477056f8fc374f0dff24ad", - "sha256:cc593caa6299b22b37f228148257997e2fa850eea2daf7e4cc9205cef6908dee" + "sha256:9a6255eb8f63c9e2135b9be97a5ce08f10230128c4ae7b3e935378b82b22c4c9", + "sha256:f5d07b1e2374187040612a1fcd2bcb2919f8db180df254c9581bb90bf903377d" ], "index": "pypi", "markers": "python_version >= '3.7'", - "version": "==3.2.0" + "version": "==3.2.1" }, "pdf2image": { "hashes": [ From eaaaa575b8cb23cf463ffccaa1f0a6f6e2af29d4 Mon Sep 17 00:00:00 2001 From: Dennis Melzer Date: Tue, 27 Aug 2024 18:31:46 +0200 Subject: [PATCH 06/32] Enhancement: allow multiple filename attachment exclusion patterns for a mail rule (#5524) --------- Co-authored-by: shamoon <4887959+shamoon@users.noreply.github.com> --- docs/usage.md | 3 ++- src/paperless_mail/mail.py | 26 +++++++++++++++++++++----- src/paperless_mail/tests/test_mail.py | 12 ++++++++++++ 3 files changed, 35 insertions(+), 6 deletions(-) diff --git a/docs/usage.md b/docs/usage.md index a20e5ea4a..bf45d4982 100644 --- a/docs/usage.md +++ b/docs/usage.md @@ -136,7 +136,8 @@ These rules perform the following: Paperless will check all emails only once and completely ignore messages that do not match your filters. It will also only perform the rule action -on e-mails that it has consumed documents from. +on e-mails that it has consumed documents from. The filename attachment +exclusion pattern can include multiple patterns separated by a comma. The actions all ensure that the same mail is not consumed twice by different means. 
These are as follows: diff --git a/src/paperless_mail/mail.py b/src/paperless_mail/mail.py index 50435de5d..83771dbf5 100644 --- a/src/paperless_mail/mail.py +++ b/src/paperless_mail/mail.py @@ -656,6 +656,24 @@ class MailAccountHandler(LoggingMixin): return processed_elements + def filename_exclusion_matches( + self, + filter_attachment_filename_exclude: Optional[str], + filename: str, + ) -> bool: + if filter_attachment_filename_exclude: + filter_attachment_filename_exclusions = ( + filter_attachment_filename_exclude.split(",") + ) + + # Force the filename and pattern to the lowercase + # as this is system dependent otherwise + filename = filename.lower() + for filename_exclude in filter_attachment_filename_exclusions: + if filename_exclude and fnmatch(filename, filename_exclude.lower()): + return True + return False + def _process_attachments( self, message: MailMessage, @@ -692,12 +710,10 @@ class MailAccountHandler(LoggingMixin): f"does not match pattern {rule.filter_attachment_filename_include}", ) continue - elif rule.filter_attachment_filename_exclude and fnmatch( - att.filename.lower(), - rule.filter_attachment_filename_exclude.lower(), + elif self.filename_exclusion_matches( + rule.filter_attachment_filename_exclude, + att.filename, ): - # Force the filename and pattern to the lowercase - # as this is system dependent otherwise self.log.debug( f"Rule {rule}: " f"Skipping attachment {att.filename} " diff --git a/src/paperless_mail/tests/test_mail.py b/src/paperless_mail/tests/test_mail.py index 26130b643..0920f033c 100644 --- a/src/paperless_mail/tests/test_mail.py +++ b/src/paperless_mail/tests/test_mail.py @@ -592,6 +592,18 @@ class TestMail( exclude_pattern="f1*", expected_matches=["f2.pdf", "f3.pdf", "file.PDf"], ), + FilterTestCase( + "PDF Files without f1 and f2", + include_pattern="*.pdf", + exclude_pattern="f1*,f2*", + expected_matches=["f3.pdf", "file.PDf"], + ), + FilterTestCase( + "PDF Files without f1 and f2 and f3", + include_pattern="*.pdf", + exclude_pattern="f1*,f2*,f3*", + expected_matches=["file.PDf"], + ), FilterTestCase( "All Files, no PNG", include_pattern="*", From 807f788f925f540c02ba26f40bc3bae604597d1a Mon Sep 17 00:00:00 2001 From: shamoon <4887959+shamoon@users.noreply.github.com> Date: Wed, 28 Aug 2024 07:32:46 -0700 Subject: [PATCH 07/32] Fix: update ng-bootstrap to fix datepicker bug (#7567) --- src-ui/package-lock.json | 8 ++++---- src-ui/package.json | 2 +- 2 files changed, 5 insertions(+), 5 deletions(-) diff --git a/src-ui/package-lock.json b/src-ui/package-lock.json index dd61e1097..a5d6b5aab 100644 --- a/src-ui/package-lock.json +++ b/src-ui/package-lock.json @@ -18,7 +18,7 @@ "@angular/platform-browser": "~18.1.3", "@angular/platform-browser-dynamic": "~18.1.3", "@angular/router": "~18.1.3", - "@ng-bootstrap/ng-bootstrap": "^17.0.0", + "@ng-bootstrap/ng-bootstrap": "^17.0.1", "@ng-select/ng-select": "^13.5.0", "@ngneat/dirty-check-forms": "^3.0.3", "@popperjs/core": "^2.11.8", @@ -5033,9 +5033,9 @@ ] }, "node_modules/@ng-bootstrap/ng-bootstrap": { - "version": "17.0.0", - "resolved": "https://registry.npmjs.org/@ng-bootstrap/ng-bootstrap/-/ng-bootstrap-17.0.0.tgz", - "integrity": "sha512-hTbBtozJlpevF1RO6J2adCoXiAkMTPV3wmXIyK05dVha4VsKjHibgaL6YldToKoh6ElQnIYkPEIJHX9z5EtyMw==", + "version": "17.0.1", + "resolved": "https://registry.npmjs.org/@ng-bootstrap/ng-bootstrap/-/ng-bootstrap-17.0.1.tgz", + "integrity": "sha512-utbm8OXIoqVVYGVzQkOS773ymbjc+UMkXv8lyi7hTqLhCQs0rZ0yA74peqVZRuOGXLHgcSTA7fnJhA80iQOblw==", "dependencies": { "tslib": "^2.3.0" }, 
diff --git a/src-ui/package.json b/src-ui/package.json index 576b9b69a..ad80d1fd6 100644 --- a/src-ui/package.json +++ b/src-ui/package.json @@ -20,7 +20,7 @@ "@angular/platform-browser": "~18.1.3", "@angular/platform-browser-dynamic": "~18.1.3", "@angular/router": "~18.1.3", - "@ng-bootstrap/ng-bootstrap": "^17.0.0", + "@ng-bootstrap/ng-bootstrap": "^17.0.1", "@ng-select/ng-select": "^13.5.0", "@ngneat/dirty-check-forms": "^3.0.3", "@popperjs/core": "^2.11.8", From f5ec6de04747c8a6c163aa037ccc1f5b54e6e18b Mon Sep 17 00:00:00 2001 From: shamoon <4887959+shamoon@users.noreply.github.com> Date: Wed, 28 Aug 2024 16:39:14 -0700 Subject: [PATCH 08/32] Fix: correct select field wrapping with long text (#7572) --- .../components/common/input/select/select.component.scss | 8 ++++++++ 1 file changed, 8 insertions(+) diff --git a/src-ui/src/app/components/common/input/select/select.component.scss b/src-ui/src/app/components/common/input/select/select.component.scss index 31466423f..4a6f6da61 100644 --- a/src-ui/src/app/components/common/input/select/select.component.scss +++ b/src-ui/src/app/components/common/input/select/select.component.scss @@ -27,3 +27,11 @@ background-position: right calc(0.375em + 0.1875rem) center !important; background-size: calc(0.75em + 0.375rem) calc(0.75em + 0.375rem) !important; } + +.input-group .ng-select-taggable:first-child:nth-last-child(2) { + max-width: calc(100% - 45px); // fudge factor for (1x) ng-select button width +} + +.input-group .ng-select-taggable:first-child:nth-last-child(3) { + max-width: calc(100% - 90px); // fudge factor for (2x) ng-select button width +} From ce663398e6688ae4259be6a7b94e025395ada94e Mon Sep 17 00:00:00 2001 From: Daniel Bankmann <6322723+dbankmann@users.noreply.github.com> Date: Thu, 29 Aug 2024 02:22:44 +0200 Subject: [PATCH 09/32] Enhancement: mail message preprocessor for gpg encrypted mails (#7456) --------- Co-authored-by: shamoon <4887959+shamoon@users.noreply.github.com> --- Dockerfile | 2 + docs/advanced_usage.md | 54 ++ docs/configuration.md | 12 + src/paperless/settings.py | 9 + src/paperless_mail/mail.py | 30 ++ src/paperless_mail/preprocessor.py | 103 ++++ src/paperless_mail/tests/test_mail.py | 474 ++++++++++-------- src/paperless_mail/tests/test_preprocessor.py | 228 +++++++++ 8 files changed, 695 insertions(+), 217 deletions(-) create mode 100644 src/paperless_mail/preprocessor.py create mode 100644 src/paperless_mail/tests/test_preprocessor.py diff --git a/Dockerfile b/Dockerfile index 2a9d7b306..4ef558712 100644 --- a/Dockerfile +++ b/Dockerfile @@ -275,6 +275,8 @@ RUN set -eux \ && mkdir --parents --verbose /usr/src/paperless/media \ && mkdir --parents --verbose /usr/src/paperless/consume \ && mkdir --parents --verbose /usr/src/paperless/export \ + && echo "Creating gnupg directory" \ + && mkdir -m700 --verbose /usr/src/paperless/.gnupg \ && echo "Adjusting all permissions" \ && chown --from root:root --changes --recursive paperless:paperless /usr/src/paperless \ && echo "Collecting static files" \ diff --git a/docs/advanced_usage.md b/docs/advanced_usage.md index 5488659a2..fe7099e2d 100644 --- a/docs/advanced_usage.md +++ b/docs/advanced_usage.md @@ -690,3 +690,57 @@ More details about configuration option for various providers can be found in th Once external auth is set up, 'regular' login can be disabled with the [PAPERLESS_DISABLE_REGULAR_LOGIN](configuration.md#PAPERLESS_DISABLE_REGULAR_LOGIN) setting and / or users can be automatically redirected with the 
[PAPERLESS_REDIRECT_LOGIN_TO_SSO](configuration.md#PAPERLESS_REDIRECT_LOGIN_TO_SSO) setting. + +## Decryption of encrypted emails before consumption {#gpg-decryptor} + +Paperless-ngx can be configured to decrypt gpg encrypted emails before consumption. + +### Requirements + +You need a recent version of `gpg-agent >= 2.1.1` installed on your host. +Your host needs to be setup for decrypting your emails via `gpg-agent`, see this [tutorial](https://www.digitalocean.com/community/tutorials/how-to-use-gpg-to-encrypt-and-sign-messages#encrypt-and-decrypt-messages-with-gpg) for instance. +Test your setup and make sure that you can encrypt and decrypt files using your key + +``` +gpg --encrypt --armor -r person@email.com name_of_file +gpg --decrypt name_of_file.asc +``` + +### Setup + +First, enable the [PAPERLESS_GPG_DECRYPTOR environment variable](configuration.md#PAPERLESS_GPG_DECRYPTOR). + +Then determine your local `gpg-agent.extra` socket by invoking + +``` +gpgconf --list-dir agent-extra-socket +``` + +on your host. A possible output is `~/.gnupg/S.gpg-agent.extra`. +Also find the location of your public keyring. + +If using docker, you'll need to add the following volume mounts to your `docker-compose.yml` file: + +```yaml +webserver: + volumes: + - /home/user/.gnupg/pubring.gpg:/usr/src/paperless/.gnupg/pubring.gpg + - :/usr/src/paperless/.gnupg/S.gpg-agent +``` + +For a 'bare-metal' installation no further configuration is necessary. If you +want to use a separate `GNUPG_HOME`, you can do so by configuring the [PAPERLESS_EMAIL_GNUPG_HOME environment variable](configuration.md#PAPERLESS_EMAIL_GNUPG_HOME). + +### Troubleshooting + +- Make sure, that `gpg-agent` is running on your host machine +- Make sure, that encryption and decryption works from inside the container using the `gpg` commands from above. +- Check that all files in `/usr/src/paperless/.gnupg` have correct permissions + +```shell +paperless@9da1865df327:~/.gnupg$ ls -al +drwx------ 1 paperless paperless 4096 Aug 18 17:52 . +drwxr-xr-x 1 paperless paperless 4096 Aug 18 17:52 .. +srw------- 1 paperless paperless 0 Aug 18 17:22 S.gpg-agent +-rw------- 1 paperless paperless 147940 Jul 24 10:23 pubring.gpg +``` diff --git a/docs/configuration.md b/docs/configuration.md index e719e043d..7172afcb3 100644 --- a/docs/configuration.md +++ b/docs/configuration.md @@ -1149,6 +1149,18 @@ within your documents. second, and year last order. Characters D, M, or Y can be shuffled to meet the required order. +#### [`PAPERLESS_GPG_DECRYPTOR=`](#PAPERLESS_GPG_DECRYPTOR) {#PAPERLESS_GPG_DECRYPTOR} + +: Enable or disable the GPG decryptor for encrypted emails. See [GPG Decryptor](advanced_usage.md#gpg-decryptor) for more information. + + Defaults to false. + +#### [`PAPERLESS_EMAIL_GNUPG_HOME=`](#PAPERLESS_EMAIL_GNUPG_HOME) {#PAPERLESS_EMAIL_GNUPG_HOME} + +: Optional, sets the `GNUPG_HOME` path to use with GPG decryptor for encrypted emails. See [GPG Decryptor](advanced_usage.md#gpg-decryptor) for more information. If not set, defaults to the default `GNUPG_HOME` path. + + Defaults to . 
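As a quick sanity check beyond the `gpg` CLI commands above, a minimal Python sketch can round-trip a payload through the keyring and agent socket. This is only an illustrative sketch: it assumes the `gnupg` package (the same `GPG` class the new `paperless_mail.preprocessor` module uses), `/usr/src/paperless/.gnupg` as an example GNUPG home, and `person@email.com` as a placeholder recipient from the docs above.

```python
from gnupg import GPG

# Example GNUPG home; when PAPERLESS_EMAIL_GNUPG_HOME is unset, the default
# GNUPG_HOME is used instead.
gpg = GPG(gnupghome="/usr/src/paperless/.gnupg")

# Encrypt and decrypt a small payload; both steps must succeed before
# enabling the decryptor for mail rules.
encrypted = gpg.encrypt("paperless gpg check", "person@email.com")
decrypted = gpg.decrypt(str(encrypted))
print(encrypted.ok, decrypted.ok, decrypted.status)
```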
+ ### Polling {#polling} #### [`PAPERLESS_CONSUMER_POLLING=`](#PAPERLESS_CONSUMER_POLLING) {#PAPERLESS_CONSUMER_POLLING} diff --git a/src/paperless/settings.py b/src/paperless/settings.py index 000904aef..27c609e8d 100644 --- a/src/paperless/settings.py +++ b/src/paperless/settings.py @@ -1171,6 +1171,15 @@ if DEBUG: # pragma: no cover EMAIL_BACKEND = "django.core.mail.backends.filebased.EmailBackend" EMAIL_FILE_PATH = BASE_DIR / "sent_emails" +############################################################################### +# Email Preprocessors # +############################################################################### + +EMAIL_GNUPG_HOME: Final[Optional[str]] = os.getenv("PAPERLESS_EMAIL_GNUPG_HOME") +EMAIL_ENABLE_GPG_DECRYPTOR: Final[bool] = __get_boolean( + "PAPERLESS_ENABLE_GPG_DECRYPTOR", +) + ############################################################################### # Soft Delete diff --git a/src/paperless_mail/mail.py b/src/paperless_mail/mail.py index 83771dbf5..92c471845 100644 --- a/src/paperless_mail/mail.py +++ b/src/paperless_mail/mail.py @@ -43,6 +43,8 @@ from documents.tasks import consume_file from paperless_mail.models import MailAccount from paperless_mail.models import MailRule from paperless_mail.models import ProcessedMail +from paperless_mail.preprocessor import MailMessageDecryptor +from paperless_mail.preprocessor import MailMessagePreprocessor # Apple Mail sets multiple IMAP KEYWORD and the general "\Flagged" FLAG # imaplib => conn.fetch(b"", "FLAGS") @@ -426,9 +428,30 @@ class MailAccountHandler(LoggingMixin): logging_name = "paperless_mail" + _message_preprocessor_types: list[type[MailMessagePreprocessor]] = [ + MailMessageDecryptor, + ] + def __init__(self) -> None: super().__init__() self.renew_logging_group() + self._init_preprocessors() + + def _init_preprocessors(self): + self._message_preprocessors: list[MailMessagePreprocessor] = [] + for preprocessor_type in self._message_preprocessor_types: + self._init_preprocessor(preprocessor_type) + + def _init_preprocessor(self, preprocessor_type): + if preprocessor_type.able_to_run(): + try: + self._message_preprocessors.append(preprocessor_type()) + except Exception as e: + self.log.warning( + f"Error while initializing preprocessor {preprocessor_type.NAME}: {e}", + ) + else: + self.log.debug(f"Skipping mail preprocessor {preprocessor_type.NAME}") def _correspondent_from_name(self, name: str) -> Optional[Correspondent]: try: @@ -535,6 +558,11 @@ class MailAccountHandler(LoggingMixin): return total_processed_files + def _preprocess_message(self, message: MailMessage): + for preprocessor in self._message_preprocessors: + message = preprocessor.run(message) + return message + def _handle_mail_rule( self, M: MailBox, @@ -613,6 +641,8 @@ class MailAccountHandler(LoggingMixin): return total_processed_files def _handle_message(self, message, rule: MailRule) -> int: + message = self._preprocess_message(message) + processed_elements = 0 # Skip Message handling when only attachments are to be processed but diff --git a/src/paperless_mail/preprocessor.py b/src/paperless_mail/preprocessor.py new file mode 100644 index 000000000..7e0c76780 --- /dev/null +++ b/src/paperless_mail/preprocessor.py @@ -0,0 +1,103 @@ +import abc +import os +from email import message_from_bytes +from email import policy +from email.message import Message + +from django.conf import settings +from gnupg import GPG +from imap_tools import MailMessage + +from documents.loggers import LoggingMixin + + +class MailMessagePreprocessor(abc.ABC): 
+ """ + Defines the interface for preprocessors that alter messages before they are handled in MailAccountHandler + """ + + NAME: str = "MailMessagePreprocessor" + + @staticmethod + @abc.abstractmethod + def able_to_run() -> bool: + """ + Return True if the conditions are met for the preprocessor to run, False otherwise + + If False, run(message) will not be called + """ + + @abc.abstractmethod + def run(self, message: MailMessage) -> MailMessage: + """ + Performs the actual preprocessing task + """ + + +class MailMessageDecryptor(MailMessagePreprocessor, LoggingMixin): + logging_name = "paperless_mail_message_decryptor" + + NAME = "MailMessageDecryptor" + + def __init__(self): + super().__init__() + self.renew_logging_group() + self._gpg = GPG(gnupghome=settings.EMAIL_GNUPG_HOME) + + @staticmethod + def able_to_run() -> bool: + if not settings.EMAIL_ENABLE_GPG_DECRYPTOR: + return False + if settings.EMAIL_GNUPG_HOME is None: + return True + return os.path.isdir(settings.EMAIL_GNUPG_HOME) + + def run(self, message: MailMessage) -> MailMessage: + if not hasattr(message, "obj"): + self.log.debug("Message does not have 'obj' attribute") + return message + if message.obj.get_content_type() != "multipart/encrypted": + self.log.debug("Message not encrypted. Keep unchanged") + return message + + self.log.debug("Message is encrypted.") + email_message = self._to_email_message(message) + decrypted_raw_message = self._gpg.decrypt(email_message.as_string()) + + if not decrypted_raw_message.ok: + self.log.debug( + f"Message decryption failed with status message " + f"{decrypted_raw_message.status}", + ) + raise Exception( + f"Decryption failed: {decrypted_raw_message.status}, {decrypted_raw_message.stderr}", + ) + self.log.debug("Message decrypted successfully.") + + decrypted_message = self._build_decrypted_message( + decrypted_raw_message, + email_message, + ) + + return MailMessage( + [(f"UID {message.uid}".encode(), decrypted_message.as_bytes())], + ) + + @staticmethod + def _to_email_message(message: MailMessage) -> Message: + email_message = message_from_bytes( + message.obj.as_bytes(), + policy=policy.default, + ) + return email_message + + @staticmethod + def _build_decrypted_message(decrypted_raw_message, email_message): + decrypted_message = message_from_bytes( + decrypted_raw_message.data, + policy=policy.default, + ) + for header, value in email_message.items(): + if not decrypted_message.get(header): + decrypted_message.add_header(header, value) + return decrypted_message diff --git a/src/paperless_mail/tests/test_mail.py b/src/paperless_mail/tests/test_mail.py index 0920f033c..d671021bf 100644 --- a/src/paperless_mail/tests/test_mail.py +++ b/src/paperless_mail/tests/test_mail.py @@ -193,31 +193,10 @@ def fake_magic_from_buffer(buffer, mime=False): return "Some verbose file description" -@mock.patch("paperless_mail.mail.magic.from_buffer", fake_magic_from_buffer) -class TestMail( - DirectoriesMixin, - FileSystemAssertsMixin, - TestCase, -): - def setUp(self): +class MessageBuilder: + def __init__(self): self._used_uids = set() - self.bogus_mailbox = BogusMailBox() - - patcher = mock.patch("paperless_mail.mail.MailBox") - m = patcher.start() - m.return_value = self.bogus_mailbox - self.addCleanup(patcher.stop) - - patcher = mock.patch("paperless_mail.mail.queue_consumption_tasks") - self._queue_consumption_tasks_mock = patcher.start() - self.addCleanup(patcher.stop) - - self.reset_bogus_mailbox() - - self.mail_account_handler = MailAccountHandler() - super().setUp() - def create_message( 
self, attachments: Union[int, list[_AttachmentDef]] = 1, @@ -283,39 +262,132 @@ class TestMail( return imap_msg - def reset_bogus_mailbox(self): - self.bogus_mailbox.messages = [] - self.bogus_mailbox.messages_spam = [] - self.bogus_mailbox.messages.append( - self.create_message( - subject="Invoice 1", - from_="amazon@amazon.de", - to=["me@myselfandi.com", "helpdesk@mydomain.com"], - body="cables", - seen=True, - flagged=False, - processed=False, - ), + +def reset_bogus_mailbox(bogus_mailbox: BogusMailBox, message_builder: MessageBuilder): + bogus_mailbox.messages = [] + bogus_mailbox.messages_spam = [] + bogus_mailbox.messages.append( + message_builder.create_message( + subject="Invoice 1", + from_="amazon@amazon.de", + to=["me@myselfandi.com", "helpdesk@mydomain.com"], + body="cables", + seen=True, + flagged=False, + processed=False, + ), + ) + bogus_mailbox.messages.append( + message_builder.create_message( + subject="Invoice 2", + body="from my favorite electronic store", + to=["invoices@mycompany.com"], + seen=False, + flagged=True, + processed=True, + ), + ) + bogus_mailbox.messages.append( + message_builder.create_message( + subject="Claim your $10M price now!", + from_="amazon@amazon-some-indian-site.org", + to=["special@me.me"], + seen=False, + ), + ) + bogus_mailbox.updateClient() + + +class MailMocker(DirectoriesMixin, FileSystemAssertsMixin, TestCase): + def setUp(self): + self.bogus_mailbox = BogusMailBox() + self.messageBuilder = MessageBuilder() + + reset_bogus_mailbox(self.bogus_mailbox, self.messageBuilder) + + patcher = mock.patch("paperless_mail.mail.MailBox") + m = patcher.start() + m.return_value = self.bogus_mailbox + self.addCleanup(patcher.stop) + + patcher = mock.patch("paperless_mail.mail.queue_consumption_tasks") + self._queue_consumption_tasks_mock = patcher.start() + self.addCleanup(patcher.stop) + + super().setUp() + + def assert_queue_consumption_tasks_call_args( + self, + expected_call_args: list[list[dict[str, str]]], + ): + """ + Verifies that queue_consumption_tasks has been called with the expected arguments. + + expected_call_args is the following format: + + * List of calls to queue_consumption_tasks, called once per mail, where each element is: + * List of signatures for the consume_file task, where each element is: + * dictionary containing arguments that need to be present in the consume_file signature. 
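        For illustration, a value of this shape (a sketch consistent with the assertions below and with the calls later in this file, e.g. a single mail expected to yield two `consume_file` tasks) could be:

```python
# Outer list: one entry per expected call to queue_consumption_tasks (one per mail).
# Inner list: one dict per expected consume_file signature (one per attachment).
expected_call_args = [
    [
        {"override_filename": "f1.pdf"},
        {"override_filename": "f2.pdf"},
    ],
]
```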
+ + """ + + # assert number of calls to queue_consumption_tasks match + self.assertEqual( + len(self._queue_consumption_tasks_mock.call_args_list), + len(expected_call_args), ) - self.bogus_mailbox.messages.append( - self.create_message( - subject="Invoice 2", - body="from my favorite electronic store", - to=["invoices@mycompany.com"], - seen=False, - flagged=True, - processed=True, - ), - ) - self.bogus_mailbox.messages.append( - self.create_message( - subject="Claim your $10M price now!", - from_="amazon@amazon-some-indian-site.org", - to="special@me.me", - seen=False, - ), - ) - self.bogus_mailbox.updateClient() + + for (mock_args, mock_kwargs), expected_signatures in zip( + self._queue_consumption_tasks_mock.call_args_list, + expected_call_args, + ): + consume_tasks = mock_kwargs["consume_tasks"] + + # assert number of consume_file tasks match + self.assertEqual(len(consume_tasks), len(expected_signatures)) + + for consume_task, expected_signature in zip( + consume_tasks, + expected_signatures, + ): + input_doc, overrides = consume_task.args + + # assert the file exists + self.assertIsFile(input_doc.original_file) + + # assert all expected arguments are present in the signature + for key, value in expected_signature.items(): + if key == "override_correspondent_id": + self.assertEqual(overrides.correspondent_id, value) + elif key == "override_filename": + self.assertEqual(overrides.filename, value) + elif key == "override_title": + self.assertEqual(overrides.title, value) + else: + self.fail("No match for expected arg") + + def apply_mail_actions(self): + """ + Applies pending actions to mails by inspecting calls to the queue_consumption_tasks method. + """ + for args, kwargs in self._queue_consumption_tasks_mock.call_args_list: + message = kwargs["message"] + rule = kwargs["rule"] + apply_mail_action([], rule.pk, message.uid, message.subject, message.date) + + +@mock.patch("paperless_mail.mail.magic.from_buffer", fake_magic_from_buffer) +class TestMail( + DirectoriesMixin, + FileSystemAssertsMixin, + TestCase, +): + def setUp(self): + self.mailMocker = MailMocker() + self.mailMocker.setUp() + self.mail_account_handler = MailAccountHandler() + + super().setUp() def test_get_correspondent(self): message = namedtuple("MailMessage", []) @@ -399,7 +471,7 @@ class TestMail( self.assertEqual(handler._get_title(message, att, rule), None) def test_handle_message(self): - message = self.create_message( + message = self.mailMocker.messageBuilder.create_message( subject="the message title", from_="Myself", attachments=2, @@ -416,9 +488,9 @@ class TestMail( self.assertEqual(result, 2) - self._queue_consumption_tasks_mock.assert_called() + self.mailMocker._queue_consumption_tasks_mock.assert_called() - self.assert_queue_consumption_tasks_call_args( + self.mailMocker.assert_queue_consumption_tasks_call_args( [ [ {"override_title": "file_0", "override_filename": "file_0.pdf"}, @@ -435,11 +507,11 @@ class TestMail( result = self.mail_account_handler._handle_message(message, rule) - self._queue_consumption_tasks_mock.assert_not_called() + self.mailMocker._queue_consumption_tasks_mock.assert_not_called() self.assertEqual(result, 0) def test_handle_unknown_mime_type(self): - message = self.create_message( + message = self.mailMocker.messageBuilder.create_message( attachments=[ _AttachmentDef(filename="f1.pdf"), _AttachmentDef( @@ -459,7 +531,7 @@ class TestMail( result = self.mail_account_handler._handle_message(message, rule) self.assertEqual(result, 1) - self.assert_queue_consumption_tasks_call_args( + 
self.mailMocker.assert_queue_consumption_tasks_call_args( [ [ {"override_filename": "f1.pdf"}, @@ -468,7 +540,7 @@ class TestMail( ) def test_handle_disposition(self): - message = self.create_message( + message = self.mailMocker.messageBuilder.create_message( attachments=[ _AttachmentDef( filename="f1.pdf", @@ -487,7 +559,7 @@ class TestMail( result = self.mail_account_handler._handle_message(message, rule) self.assertEqual(result, 1) - self.assert_queue_consumption_tasks_call_args( + self.mailMocker.assert_queue_consumption_tasks_call_args( [ [ {"override_filename": "f2.pdf"}, @@ -496,7 +568,7 @@ class TestMail( ) def test_handle_inline_files(self): - message = self.create_message( + message = self.mailMocker.messageBuilder.create_message( attachments=[ _AttachmentDef( filename="f1.pdf", @@ -516,7 +588,7 @@ class TestMail( result = self.mail_account_handler._handle_message(message, rule) self.assertEqual(result, 2) - self.assert_queue_consumption_tasks_call_args( + self.mailMocker.assert_queue_consumption_tasks_call_args( [ [ {"override_filename": "f1.pdf"}, @@ -536,7 +608,7 @@ class TestMail( - Mail action should not be performed for files excluded - Mail action should be performed for files included """ - message = self.create_message( + message = self.mailMocker.messageBuilder.create_message( attachments=[ _AttachmentDef(filename="f1.pdf"), _AttachmentDef(filename="f2.pdf"), @@ -620,7 +692,7 @@ class TestMail( for test_case in tests: with self.subTest(msg=test_case.name): - self._queue_consumption_tasks_mock.reset_mock() + self.mailMocker._queue_consumption_tasks_mock.reset_mock() account = MailAccount(name=str(uuid.uuid4())) account.save() rule = MailRule( @@ -633,7 +705,7 @@ class TestMail( rule.save() self.mail_account_handler._handle_message(message, rule) - self.assert_queue_consumption_tasks_call_args( + self.mailMocker.assert_queue_consumption_tasks_call_args( [ [{"override_filename": m} for m in test_case.expected_matches], ], @@ -648,7 +720,7 @@ class TestMail( THEN: - Mail action should not be performed """ - message = self.create_message( + message = self.mailMocker.messageBuilder.create_message( attachments=[ _AttachmentDef( filename="test.png", @@ -656,7 +728,7 @@ class TestMail( ), ], ) - self.bogus_mailbox.messages.append(message) + self.mailMocker.bogus_mailbox.messages.append(message) account = MailAccount.objects.create( name="test", imap_server="", @@ -674,12 +746,12 @@ class TestMail( ) rule.save() - self.assertEqual(len(self.bogus_mailbox.messages), 4) + self.assertEqual(len(self.mailMocker.bogus_mailbox.messages), 4) self.mail_account_handler.handle_mail_account(account) - self.apply_mail_actions() + self.mailMocker.apply_mail_actions() - self.assertEqual(len(self.bogus_mailbox.messages), 1) + self.assertEqual(len(self.mailMocker.bogus_mailbox.messages), 1) def test_handle_mail_account_mark_read(self): account = MailAccount.objects.create( @@ -695,14 +767,14 @@ class TestMail( action=MailRule.MailAction.MARK_READ, ) - self.assertEqual(len(self.bogus_mailbox.messages), 3) - self.assertEqual(len(self.bogus_mailbox.fetch("UNSEEN", False)), 2) + self.assertEqual(len(self.mailMocker.bogus_mailbox.messages), 3) + self.assertEqual(len(self.mailMocker.bogus_mailbox.fetch("UNSEEN", False)), 2) self.mail_account_handler.handle_mail_account(account) - self.apply_mail_actions() + self.mailMocker.apply_mail_actions() - self.assertEqual(len(self.bogus_mailbox.fetch("UNSEEN", False)), 0) - self.assertEqual(len(self.bogus_mailbox.messages), 3) + 
self.assertEqual(len(self.mailMocker.bogus_mailbox.fetch("UNSEEN", False)), 0) + self.assertEqual(len(self.mailMocker.bogus_mailbox.messages), 3) def test_handle_mail_account_delete(self): account = MailAccount.objects.create( @@ -719,12 +791,12 @@ class TestMail( filter_subject="Invoice", ) - self.assertEqual(len(self.bogus_mailbox.messages), 3) + self.assertEqual(len(self.mailMocker.bogus_mailbox.messages), 3) self.mail_account_handler.handle_mail_account(account) - self.apply_mail_actions() + self.mailMocker.apply_mail_actions() - self.assertEqual(len(self.bogus_mailbox.messages), 1) + self.assertEqual(len(self.mailMocker.bogus_mailbox.messages), 1) def test_handle_mail_account_delete_no_filters(self): account = MailAccount.objects.create( @@ -741,12 +813,12 @@ class TestMail( maximum_age=0, ) - self.assertEqual(len(self.bogus_mailbox.messages), 3) + self.assertEqual(len(self.mailMocker.bogus_mailbox.messages), 3) self.mail_account_handler.handle_mail_account(account) - self.apply_mail_actions() + self.mailMocker.apply_mail_actions() - self.assertEqual(len(self.bogus_mailbox.messages), 0) + self.assertEqual(len(self.mailMocker.bogus_mailbox.messages), 0) def test_handle_mail_account_flag(self): account = MailAccount.objects.create( @@ -763,14 +835,20 @@ class TestMail( filter_subject="Invoice", ) - self.assertEqual(len(self.bogus_mailbox.messages), 3) - self.assertEqual(len(self.bogus_mailbox.fetch("UNFLAGGED", False)), 2) + self.assertEqual(len(self.mailMocker.bogus_mailbox.messages), 3) + self.assertEqual( + len(self.mailMocker.bogus_mailbox.fetch("UNFLAGGED", False)), + 2, + ) self.mail_account_handler.handle_mail_account(account) - self.apply_mail_actions() + self.mailMocker.apply_mail_actions() - self.assertEqual(len(self.bogus_mailbox.fetch("UNFLAGGED", False)), 1) - self.assertEqual(len(self.bogus_mailbox.messages), 3) + self.assertEqual( + len(self.mailMocker.bogus_mailbox.fetch("UNFLAGGED", False)), + 1, + ) + self.assertEqual(len(self.mailMocker.bogus_mailbox.messages), 3) @pytest.mark.flaky(reruns=4) def test_handle_mail_account_move(self): @@ -789,14 +867,14 @@ class TestMail( filter_subject="Claim", ) - self.assertEqual(len(self.bogus_mailbox.messages), 3) - self.assertEqual(len(self.bogus_mailbox.messages_spam), 0) + self.assertEqual(len(self.mailMocker.bogus_mailbox.messages), 3) + self.assertEqual(len(self.mailMocker.bogus_mailbox.messages_spam), 0) self.mail_account_handler.handle_mail_account(account) - self.apply_mail_actions() + self.mailMocker.apply_mail_actions() - self.assertEqual(len(self.bogus_mailbox.messages), 2) - self.assertEqual(len(self.bogus_mailbox.messages_spam), 1) + self.assertEqual(len(self.mailMocker.bogus_mailbox.messages), 2) + self.assertEqual(len(self.mailMocker.bogus_mailbox.messages_spam), 1) def test_handle_mail_account_move_no_filters(self): account = MailAccount.objects.create( @@ -814,14 +892,14 @@ class TestMail( maximum_age=0, ) - self.assertEqual(len(self.bogus_mailbox.messages), 3) - self.assertEqual(len(self.bogus_mailbox.messages_spam), 0) + self.assertEqual(len(self.mailMocker.bogus_mailbox.messages), 3) + self.assertEqual(len(self.mailMocker.bogus_mailbox.messages_spam), 0) self.mail_account_handler.handle_mail_account(account) - self.apply_mail_actions() + self.mailMocker.apply_mail_actions() - self.assertEqual(len(self.bogus_mailbox.messages), 0) - self.assertEqual(len(self.bogus_mailbox.messages_spam), 3) + self.assertEqual(len(self.mailMocker.bogus_mailbox.messages), 0) + 
self.assertEqual(len(self.mailMocker.bogus_mailbox.messages_spam), 3) def test_handle_mail_account_tag(self): account = MailAccount.objects.create( @@ -838,18 +916,24 @@ class TestMail( action_parameter="processed", ) - self.assertEqual(len(self.bogus_mailbox.messages), 3) - self.assertEqual(len(self.bogus_mailbox.fetch("UNKEYWORD processed", False)), 2) + self.assertEqual(len(self.mailMocker.bogus_mailbox.messages), 3) + self.assertEqual( + len(self.mailMocker.bogus_mailbox.fetch("UNKEYWORD processed", False)), + 2, + ) self.mail_account_handler.handle_mail_account(account) - self.apply_mail_actions() + self.mailMocker.apply_mail_actions() - self.assertEqual(len(self.bogus_mailbox.messages), 3) - self.assertEqual(len(self.bogus_mailbox.fetch("UNKEYWORD processed", False)), 0) + self.assertEqual(len(self.mailMocker.bogus_mailbox.messages), 3) + self.assertEqual( + len(self.mailMocker.bogus_mailbox.fetch("UNKEYWORD processed", False)), + 0, + ) def test_handle_mail_account_tag_gmail(self): - self.bogus_mailbox._host = "imap.gmail.com" - self.bogus_mailbox.client.capabilities = ["X-GM-EXT-1"] + self.mailMocker.bogus_mailbox._host = "imap.gmail.com" + self.mailMocker.bogus_mailbox.client.capabilities = ["X-GM-EXT-1"] account = MailAccount.objects.create( name="test", @@ -865,15 +949,15 @@ class TestMail( action_parameter="processed", ) - self.assertEqual(len(self.bogus_mailbox.messages), 3) + self.assertEqual(len(self.mailMocker.bogus_mailbox.messages), 3) criteria = NOT(gmail_label="processed") - self.assertEqual(len(self.bogus_mailbox.fetch(criteria, False)), 2) + self.assertEqual(len(self.mailMocker.bogus_mailbox.fetch(criteria, False)), 2) self.mail_account_handler.handle_mail_account(account) - self.apply_mail_actions() + self.mailMocker.apply_mail_actions() - self.assertEqual(len(self.bogus_mailbox.fetch(criteria, False)), 0) - self.assertEqual(len(self.bogus_mailbox.messages), 3) + self.assertEqual(len(self.mailMocker.bogus_mailbox.fetch(criteria, False)), 0) + self.assertEqual(len(self.mailMocker.bogus_mailbox.messages), 3) def test_tag_mail_action_applemail_wrong_input(self): self.assertRaises( @@ -900,14 +984,20 @@ class TestMail( action_parameter="apple:green", ) - self.assertEqual(len(self.bogus_mailbox.messages), 3) - self.assertEqual(len(self.bogus_mailbox.fetch("UNFLAGGED", False)), 2) + self.assertEqual(len(self.mailMocker.bogus_mailbox.messages), 3) + self.assertEqual( + len(self.mailMocker.bogus_mailbox.fetch("UNFLAGGED", False)), + 2, + ) self.mail_account_handler.handle_mail_account(account) - self.apply_mail_actions() + self.mailMocker.apply_mail_actions() - self.assertEqual(len(self.bogus_mailbox.fetch("UNFLAGGED", False)), 0) - self.assertEqual(len(self.bogus_mailbox.messages), 3) + self.assertEqual( + len(self.mailMocker.bogus_mailbox.fetch("UNFLAGGED", False)), + 0, + ) + self.assertEqual(len(self.mailMocker.bogus_mailbox.messages), 3) def test_error_login(self): """ @@ -955,10 +1045,10 @@ class TestMail( ) tasks.process_mail_accounts() - self.apply_mail_actions() + self.mailMocker.apply_mail_actions() - self.assertEqual(len(self.bogus_mailbox.messages), 2) - self.assertEqual(len(self.bogus_mailbox.messages_spam), 1) + self.assertEqual(len(self.mailMocker.bogus_mailbox.messages), 2) + self.assertEqual(len(self.mailMocker.bogus_mailbox.messages_spam), 1) def test_error_skip_rule(self): account = MailAccount.objects.create( @@ -986,10 +1076,10 @@ class TestMail( ) self.mail_account_handler.handle_mail_account(account) - self.apply_mail_actions() + 
self.mailMocker.apply_mail_actions() - self.assertEqual(len(self.bogus_mailbox.messages), 2) - self.assertEqual(len(self.bogus_mailbox.messages_spam), 1) + self.assertEqual(len(self.mailMocker.bogus_mailbox.messages), 2) + self.assertEqual(len(self.mailMocker.bogus_mailbox.messages_spam), 1) def test_error_folder_set(self): """ @@ -1015,14 +1105,14 @@ class TestMail( folder="uuuhhhh", # Invalid folder name ) - self.bogus_mailbox.folder.list = mock.Mock( + self.mailMocker.bogus_mailbox.folder.list = mock.Mock( return_value=[FolderInfo("SomeFoldername", "|", ())], ) self.mail_account_handler.handle_mail_account(account) - self.bogus_mailbox.folder.list.assert_called_once() - self._queue_consumption_tasks_mock.assert_not_called() + self.mailMocker.bogus_mailbox.folder.list.assert_called_once() + self.mailMocker._queue_consumption_tasks_mock.assert_not_called() def test_error_folder_set_error_listing(self): """ @@ -1048,14 +1138,14 @@ class TestMail( folder="uuuhhhh", # Invalid folder name ) - self.bogus_mailbox.folder.list = mock.Mock( + self.mailMocker.bogus_mailbox.folder.list = mock.Mock( side_effect=MailboxFolderSelectError(None, "uhm"), ) self.mail_account_handler.handle_mail_account(account) - self.bogus_mailbox.folder.list.assert_called_once() - self._queue_consumption_tasks_mock.assert_not_called() + self.mailMocker.bogus_mailbox.folder.list.assert_called_once() + self.mailMocker._queue_consumption_tasks_mock.assert_not_called() @mock.patch("paperless_mail.mail.MailAccountHandler._get_correspondent") def test_error_skip_mail(self, m): @@ -1081,14 +1171,17 @@ class TestMail( ) self.mail_account_handler.handle_mail_account(account) - self.apply_mail_actions() + self.mailMocker.apply_mail_actions() # test that we still consume mail even if some mails throw errors. 
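# A minimal sketch of the selective-failure pattern these error-handling tests exercise:
# a mock's side_effect can raise for one input and behave normally for the rest, so the
# caller's per-item error handling keeps consuming the remaining items instead of
# aborting the whole run. All names and values below are assumptions for the example,
# not part of the test suite.
from unittest import mock

def flaky(value):
    # hypothetical lookup: fail for exactly one input, succeed for the others
    if value == "bad":
        raise ValueError("simulated failure")
    return value.upper()

lookup = mock.Mock(side_effect=flaky)

results = []
for item in ["ok", "bad", "also-ok"]:
    try:
        results.append(lookup(item))
    except ValueError:
        results.append(None)  # skip the failing item, keep processing the rest

assert results == ["OK", None, "ALSO-OK"]
assert lookup.call_count == 3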
- self.assertEqual(self._queue_consumption_tasks_mock.call_count, 2) + self.assertEqual(self.mailMocker._queue_consumption_tasks_mock.call_count, 2) # faulty mail still in inbox, untouched - self.assertEqual(len(self.bogus_mailbox.messages), 1) - self.assertEqual(self.bogus_mailbox.messages[0].from_, "amazon@amazon.de") + self.assertEqual(len(self.mailMocker.bogus_mailbox.messages), 1) + self.assertEqual( + self.mailMocker.bogus_mailbox.messages[0].from_, + "amazon@amazon.de", + ) def test_error_create_correspondent(self): account = MailAccount.objects.create( @@ -1108,10 +1201,10 @@ class TestMail( self.mail_account_handler.handle_mail_account(account) - self._queue_consumption_tasks_mock.assert_called_once() + self.mailMocker._queue_consumption_tasks_mock.assert_called_once() c = Correspondent.objects.get(name="amazon@amazon.de") - self.assert_queue_consumption_tasks_call_args( + self.mailMocker.assert_queue_consumption_tasks_call_args( [ [ {"override_correspondent_id": c.id}, @@ -1119,15 +1212,18 @@ class TestMail( ], ) - self._queue_consumption_tasks_mock.reset_mock() - self.reset_bogus_mailbox() + self.mailMocker._queue_consumption_tasks_mock.reset_mock() + reset_bogus_mailbox( + self.mailMocker.bogus_mailbox, + self.mailMocker.messageBuilder, + ) with mock.patch("paperless_mail.mail.Correspondent.objects.get_or_create") as m: m.side_effect = DatabaseError() self.mail_account_handler.handle_mail_account(account) - self.assert_queue_consumption_tasks_call_args( + self.mailMocker.assert_queue_consumption_tasks_call_args( [ [ {"override_correspondent_id": None}, @@ -1165,21 +1261,24 @@ class TestMail( filter_from=f_from, filter_to=f_to, ) - self.reset_bogus_mailbox() - self._queue_consumption_tasks_mock.reset_mock() + reset_bogus_mailbox( + self.mailMocker.bogus_mailbox, + self.mailMocker.messageBuilder, + ) + self.mailMocker._queue_consumption_tasks_mock.reset_mock() - self._queue_consumption_tasks_mock.assert_not_called() - self.assertEqual(len(self.bogus_mailbox.messages), 3) + self.mailMocker._queue_consumption_tasks_mock.assert_not_called() + self.assertEqual(len(self.mailMocker.bogus_mailbox.messages), 3) self.mail_account_handler.handle_mail_account(account) - self.apply_mail_actions() + self.mailMocker.apply_mail_actions() self.assertEqual( - len(self.bogus_mailbox.messages), + len(self.mailMocker.bogus_mailbox.messages), 3 - expected_mail_count, ) self.assertEqual( - self._queue_consumption_tasks_mock.call_count, + self.mailMocker._queue_consumption_tasks_mock.call_count, expected_mail_count, ) @@ -1206,16 +1305,16 @@ class TestMail( action=MailRule.MailAction.MARK_READ, ) - self.assertEqual(len(self.bogus_mailbox.messages), 3) - self._queue_consumption_tasks_mock.assert_not_called() - self.assertEqual(len(self.bogus_mailbox.fetch("UNSEEN", False)), 2) + self.assertEqual(len(self.mailMocker.bogus_mailbox.messages), 3) + self.mailMocker._queue_consumption_tasks_mock.assert_not_called() + self.assertEqual(len(self.mailMocker.bogus_mailbox.fetch("UNSEEN", False)), 2) self.mail_account_handler.handle_mail_account(account) - self.apply_mail_actions() + self.mailMocker.apply_mail_actions() - self.assertEqual(self._queue_consumption_tasks_mock.call_count, 2) - self.assertEqual(len(self.bogus_mailbox.fetch("UNSEEN", False)), 0) - self.assertEqual(len(self.bogus_mailbox.messages), 3) + self.assertEqual(self.mailMocker._queue_consumption_tasks_mock.call_count, 2) + self.assertEqual(len(self.mailMocker.bogus_mailbox.fetch("UNSEEN", False)), 0) + 
self.assertEqual(len(self.mailMocker.bogus_mailbox.messages), 3) def test_auth_plain_fallback_fails_still(self): """ @@ -1272,75 +1371,16 @@ class TestMail( action=MailRule.MailAction.MARK_READ, ) - self.assertEqual(len(self.bogus_mailbox.messages), 3) - self.assertEqual(self._queue_consumption_tasks_mock.call_count, 0) - self.assertEqual(len(self.bogus_mailbox.fetch("UNSEEN", False)), 2) + self.assertEqual(len(self.mailMocker.bogus_mailbox.messages), 3) + self.assertEqual(self.mailMocker._queue_consumption_tasks_mock.call_count, 0) + self.assertEqual(len(self.mailMocker.bogus_mailbox.fetch("UNSEEN", False)), 2) self.mail_account_handler.handle_mail_account(account) - self.apply_mail_actions() + self.mailMocker.apply_mail_actions() - self.assertEqual(self._queue_consumption_tasks_mock.call_count, 2) - self.assertEqual(len(self.bogus_mailbox.fetch("UNSEEN", False)), 0) - self.assertEqual(len(self.bogus_mailbox.messages), 3) - - def assert_queue_consumption_tasks_call_args( - self, - expected_call_args: list[list[dict[str, str]]], - ): - """ - Verifies that queue_consumption_tasks has been called with the expected arguments. - - expected_call_args is the following format: - - * List of calls to queue_consumption_tasks, called once per mail, where each element is: - * List of signatures for the consume_file task, where each element is: - * dictionary containing arguments that need to be present in the consume_file signature. - - """ - - # assert number of calls to queue_consumption_tasks match - self.assertEqual( - len(self._queue_consumption_tasks_mock.call_args_list), - len(expected_call_args), - ) - - for (mock_args, mock_kwargs), expected_signatures in zip( - self._queue_consumption_tasks_mock.call_args_list, - expected_call_args, - ): - consume_tasks = mock_kwargs["consume_tasks"] - - # assert number of consume_file tasks match - self.assertEqual(len(consume_tasks), len(expected_signatures)) - - for consume_task, expected_signature in zip( - consume_tasks, - expected_signatures, - ): - input_doc, overrides = consume_task.args - - # assert the file exists - self.assertIsFile(input_doc.original_file) - - # assert all expected arguments are present in the signature - for key, value in expected_signature.items(): - if key == "override_correspondent_id": - self.assertEqual(overrides.correspondent_id, value) - elif key == "override_filename": - self.assertEqual(overrides.filename, value) - elif key == "override_title": - self.assertEqual(overrides.title, value) - else: - self.fail("No match for expected arg") - - def apply_mail_actions(self): - """ - Applies pending actions to mails by inspecting calls to the queue_consumption_tasks method. 
- """ - for args, kwargs in self._queue_consumption_tasks_mock.call_args_list: - message = kwargs["message"] - rule = kwargs["rule"] - apply_mail_action([], rule.pk, message.uid, message.subject, message.date) + self.assertEqual(self.mailMocker._queue_consumption_tasks_mock.call_count, 2) + self.assertEqual(len(self.mailMocker.bogus_mailbox.fetch("UNSEEN", False)), 0) + self.assertEqual(len(self.mailMocker.bogus_mailbox.messages), 3) class TestManagementCommand(TestCase): diff --git a/src/paperless_mail/tests/test_preprocessor.py b/src/paperless_mail/tests/test_preprocessor.py new file mode 100644 index 000000000..90df77ba8 --- /dev/null +++ b/src/paperless_mail/tests/test_preprocessor.py @@ -0,0 +1,228 @@ +import email +import email.contentmanager +import tempfile +from email.message import Message +from email.mime.application import MIMEApplication +from email.mime.multipart import MIMEMultipart +from unittest import mock + +import gnupg +from django.test import override_settings +from imap_tools import MailMessage + +from paperless_mail.mail import MailAccountHandler +from paperless_mail.models import MailAccount +from paperless_mail.models import MailRule +from paperless_mail.preprocessor import MailMessageDecryptor +from paperless_mail.tests.test_mail import TestMail +from paperless_mail.tests.test_mail import _AttachmentDef + + +class MessageEncryptor: + def __init__(self): + self.gpg_home = tempfile.mkdtemp() + self.gpg = gnupg.GPG(gnupghome=self.gpg_home) + self._testUser = "testuser@example.com" + # Generate a new key + input_data = self.gpg.gen_key_input( + name_email=self._testUser, + passphrase=None, + key_type="RSA", + key_length=2048, + expire_date=0, + no_protection=True, + ) + self.gpg.gen_key(input_data) + + @staticmethod + def get_email_body_without_headers(email_message: Message) -> bytes: + """ + Filters some relevant headers from an EmailMessage and returns just the body. 
+ """ + message_copy = email.message_from_bytes(email_message.as_bytes()) + + message_copy._headers = [ + header + for header in message_copy._headers + if header[0].lower() not in ("from", "to", "subject") + ] + return message_copy.as_bytes() + + def encrypt(self, message): + original_email: email.message.Message = message.obj + encrypted_data = self.gpg.encrypt( + self.get_email_body_without_headers(original_email), + self._testUser, + armor=True, + ) + if not encrypted_data.ok: + raise Exception(f"Encryption failed: {encrypted_data.stderr}") + encrypted_email_content = encrypted_data.data + + new_email = MIMEMultipart("encrypted", protocol="application/pgp-encrypted") + new_email["From"] = original_email["From"] + new_email["To"] = original_email["To"] + new_email["Subject"] = original_email["Subject"] + + # Add the control part + control_part = MIMEApplication(_data=b"", _subtype="pgp-encrypted") + control_part.set_payload("Version: 1") + new_email.attach(control_part) + + # Add the encrypted data part + encrypted_part = MIMEApplication(_data=b"", _subtype="octet-stream") + encrypted_part.set_payload(encrypted_email_content.decode("ascii")) + encrypted_part.add_header( + "Content-Disposition", + 'attachment; filename="encrypted.asc"', + ) + new_email.attach(encrypted_part) + + encrypted_message: MailMessage = MailMessage( + [(f"UID {message.uid}".encode(), new_email.as_bytes())], + ) + return encrypted_message + + +class TestMailMessageGpgDecryptor(TestMail): + def setUp(self): + self.messageEncryptor = MessageEncryptor() + with override_settings( + EMAIL_GNUPG_HOME=self.messageEncryptor.gpg_home, + EMAIL_ENABLE_GPG_DECRYPTOR=True, + ): + super().setUp() + + def test_preprocessor_is_able_to_run(self): + with override_settings( + EMAIL_GNUPG_HOME=self.messageEncryptor.gpg_home, + EMAIL_ENABLE_GPG_DECRYPTOR=True, + ): + self.assertTrue(MailMessageDecryptor.able_to_run()) + + def test_preprocessor_is_able_to_run2(self): + with override_settings( + EMAIL_GNUPG_HOME=None, + EMAIL_ENABLE_GPG_DECRYPTOR=True, + ): + self.assertTrue(MailMessageDecryptor.able_to_run()) + + def test_is_not_able_to_run_disabled(self): + with override_settings( + EMAIL_ENABLE_GPG_DECRYPTOR=False, + ): + self.assertFalse(MailMessageDecryptor.able_to_run()) + + def test_is_not_able_to_run_bogus_path(self): + with override_settings( + EMAIL_ENABLE_GPG_DECRYPTOR=True, + EMAIL_GNUPG_HOME="_)@# notapath &%#$", + ): + self.assertFalse(MailMessageDecryptor.able_to_run()) + + def test_fails_at_initialization(self): + with ( + mock.patch("gnupg.GPG.__init__") as mock_run, + override_settings( + EMAIL_ENABLE_GPG_DECRYPTOR=True, + ), + ): + + def side_effect(*args, **kwargs): + raise OSError("Cannot find 'gpg' binary") + + mock_run.side_effect = side_effect + + handler = MailAccountHandler() + self.assertEqual(len(handler._message_preprocessors), 0) + + def test_decrypt_fails(self): + encrypted_message, _ = self.create_encrypted_unencrypted_message_pair() + empty_gpg_home = tempfile.mkdtemp() + with override_settings( + EMAIL_ENABLE_GPG_DECRYPTOR=True, + EMAIL_GNUPG_HOME=empty_gpg_home, + ): + message_decryptor = MailMessageDecryptor() + self.assertRaises(Exception, message_decryptor.run, encrypted_message) + + def test_decrypt_encrypted_mail(self): + """ + Creates a mail with attachments. Then encrypts it with a new key. + Verifies that this encrypted message can be decrypted with attachments intact. 
+ """ + encrypted_message, message = self.create_encrypted_unencrypted_message_pair() + headers = message.headers + text = message.text + + self.assertEqual(len(encrypted_message.attachments), 1) + self.assertEqual(encrypted_message.attachments[0].filename, "encrypted.asc") + self.assertEqual(encrypted_message.text, "") + + with override_settings( + EMAIL_ENABLE_GPG_DECRYPTOR=True, + EMAIL_GNUPG_HOME=self.messageEncryptor.gpg_home, + ): + message_decryptor = MailMessageDecryptor() + self.assertTrue(message_decryptor.able_to_run()) + decrypted_message = message_decryptor.run(encrypted_message) + + self.assertEqual(len(decrypted_message.attachments), 2) + self.assertEqual(decrypted_message.attachments[0].filename, "f1.pdf") + self.assertEqual(decrypted_message.attachments[1].filename, "f2.pdf") + self.assertEqual(decrypted_message.headers, headers) + self.assertEqual(decrypted_message.text, text) + self.assertEqual(decrypted_message.uid, message.uid) + + def create_encrypted_unencrypted_message_pair(self): + message = self.mailMocker.messageBuilder.create_message( + body="Test message with 2 attachments", + attachments=[ + _AttachmentDef( + filename="f1.pdf", + disposition="inline", + ), + _AttachmentDef(filename="f2.pdf"), + ], + ) + encrypted_message = self.messageEncryptor.encrypt(message) + return encrypted_message, message + + def test_handle_encrypted_message(self): + message = self.mailMocker.messageBuilder.create_message( + subject="the message title", + from_="Myself", + attachments=2, + body="Test mail", + ) + + encrypted_message = self.messageEncryptor.encrypt(message) + + account = MailAccount.objects.create() + rule = MailRule( + assign_title_from=MailRule.TitleSource.FROM_FILENAME, + consumption_scope=MailRule.ConsumptionScope.EVERYTHING, + account=account, + ) + rule.save() + + result = self.mail_account_handler._handle_message(encrypted_message, rule) + + self.assertEqual(result, 3) + + self.mailMocker._queue_consumption_tasks_mock.assert_called() + + self.mailMocker.assert_queue_consumption_tasks_call_args( + [ + [ + { + "override_title": message.subject, + "override_filename": f"{message.subject}.eml", + }, + ], + [ + {"override_title": "file_0", "override_filename": "file_0.pdf"}, + {"override_title": "file_1", "override_filename": "file_1.pdf"}, + ], + ], + ) From dad3a1ff28992891688810186dd4df051c4385b8 Mon Sep 17 00:00:00 2001 From: shamoon <4887959+shamoon@users.noreply.github.com> Date: Wed, 28 Aug 2024 20:10:23 -0700 Subject: [PATCH 10/32] Feature: add Korean language (#7573) --- src-ui/angular.json | 1 + src-ui/messages.xlf | 45 ++++++++++++--------- src-ui/setup-jest.ts | 2 + src-ui/src/app/app.module.ts | 2 + src-ui/src/app/services/settings.service.ts | 6 +++ src/locale/en_US/LC_MESSAGES/django.po | 42 ++++++++++--------- src/paperless/settings.py | 1 + 7 files changed, 61 insertions(+), 38 deletions(-) diff --git a/src-ui/angular.json b/src-ui/angular.json index a1bb47f1a..90a54e133 100644 --- a/src-ui/angular.json +++ b/src-ui/angular.json @@ -33,6 +33,7 @@ "it-IT": "src/locale/messages.it_IT.xlf", "ja-JP": "src/locale/messages.ja_JP.xlf", "lb-LU": "src/locale/messages.lb_LU.xlf", + "ko-KR": "src/locale/messages.ko_KR.xlf", "nl-NL": "src/locale/messages.nl_NL.xlf", "no-NO": "src/locale/messages.no_NO.xlf", "pl-PL": "src/locale/messages.pl_PL.xlf", diff --git a/src-ui/messages.xlf b/src-ui/messages.xlf index 75f585b76..71cda881d 100644 --- a/src-ui/messages.xlf +++ b/src-ui/messages.xlf @@ -8602,137 +8602,144 @@ 142 + + Korean + + 
src/app/services/settings.service.ts + 148 + + Luxembourgish src/app/services/settings.service.ts - 148 + 154 Dutch src/app/services/settings.service.ts - 154 + 160 Norwegian src/app/services/settings.service.ts - 160 + 166 Polish src/app/services/settings.service.ts - 166 + 172 Portuguese (Brazil) src/app/services/settings.service.ts - 172 + 178 Portuguese src/app/services/settings.service.ts - 178 + 184 Romanian src/app/services/settings.service.ts - 184 + 190 Russian src/app/services/settings.service.ts - 190 + 196 Slovak src/app/services/settings.service.ts - 196 + 202 Slovenian src/app/services/settings.service.ts - 202 + 208 Serbian src/app/services/settings.service.ts - 208 + 214 Swedish src/app/services/settings.service.ts - 214 + 220 Turkish src/app/services/settings.service.ts - 220 + 226 Ukrainian src/app/services/settings.service.ts - 226 + 232 Chinese Simplified src/app/services/settings.service.ts - 232 + 238 ISO 8601 src/app/services/settings.service.ts - 240 + 246 Successfully completed one-time migratration of settings to the database! src/app/services/settings.service.ts - 567 + 573 Unable to migrate settings to the database, please try saving manually. src/app/services/settings.service.ts - 568 + 574 You can restart the tour from the settings page. src/app/services/settings.service.ts - 638 + 644 diff --git a/src-ui/setup-jest.ts b/src-ui/setup-jest.ts index 3486d17fc..f545e7295 100644 --- a/src-ui/setup-jest.ts +++ b/src-ui/setup-jest.ts @@ -24,6 +24,7 @@ import localeFr from '@angular/common/locales/fr' import localeHu from '@angular/common/locales/hu' import localeIt from '@angular/common/locales/it' import localeJa from '@angular/common/locales/ja' +import localeKo from '@angular/common/locales/ko' import localeLb from '@angular/common/locales/lb' import localeNl from '@angular/common/locales/nl' import localeNo from '@angular/common/locales/no' @@ -55,6 +56,7 @@ registerLocaleData(localeFr) registerLocaleData(localeHu) registerLocaleData(localeIt) registerLocaleData(localeJa) +registerLocaleData(localeKo) registerLocaleData(localeLb) registerLocaleData(localeNl) registerLocaleData(localeNo) diff --git a/src-ui/src/app/app.module.ts b/src-ui/src/app/app.module.ts index 3c4edc085..005de5369 100644 --- a/src-ui/src/app/app.module.ts +++ b/src-ui/src/app/app.module.ts @@ -347,6 +347,7 @@ import localeFr from '@angular/common/locales/fr' import localeHu from '@angular/common/locales/hu' import localeIt from '@angular/common/locales/it' import localeJa from '@angular/common/locales/ja' +import localeKo from '@angular/common/locales/ko' import localeLb from '@angular/common/locales/lb' import localeNl from '@angular/common/locales/nl' import localeNo from '@angular/common/locales/no' @@ -378,6 +379,7 @@ registerLocaleData(localeFr) registerLocaleData(localeHu) registerLocaleData(localeIt) registerLocaleData(localeJa) +registerLocaleData(localeKo) registerLocaleData(localeLb) registerLocaleData(localeNl) registerLocaleData(localeNo) diff --git a/src-ui/src/app/services/settings.service.ts b/src-ui/src/app/services/settings.service.ts index 517098557..91d1cc320 100644 --- a/src-ui/src/app/services/settings.service.ts +++ b/src-ui/src/app/services/settings.service.ts @@ -143,6 +143,12 @@ const LANGUAGE_OPTIONS = [ englishName: 'Japanese', dateInputFormat: 'yyyy/mm/dd', }, + { + code: 'ko-kr', + name: $localize`Korean`, + englishName: 'Korean', + dateInputFormat: 'yyyy-mm-dd', + }, { code: 'lb-lu', name: $localize`Luxembourgish`, diff --git 
a/src/locale/en_US/LC_MESSAGES/django.po b/src/locale/en_US/LC_MESSAGES/django.po index 7eeb97d17..0c2f6dd4b 100644 --- a/src/locale/en_US/LC_MESSAGES/django.po +++ b/src/locale/en_US/LC_MESSAGES/django.po @@ -2,7 +2,7 @@ msgid "" msgstr "" "Project-Id-Version: paperless-ngx\n" "Report-Msgid-Bugs-To: \n" -"POT-Creation-Date: 2024-08-19 23:29-0700\n" +"POT-Creation-Date: 2024-08-28 17:12-0700\n" "PO-Revision-Date: 2022-02-17 04:17\n" "Last-Translator: \n" "Language-Team: English\n" @@ -936,21 +936,21 @@ msgstr "" msgid "enabled" msgstr "" -#: documents/serialisers.py:119 +#: documents/serialisers.py:120 #, python-format msgid "Invalid regular expression: %(error)s" msgstr "" -#: documents/serialisers.py:435 +#: documents/serialisers.py:467 msgid "Invalid color." msgstr "" -#: documents/serialisers.py:1331 +#: documents/serialisers.py:1397 #, python-format msgid "File type %(type)s not supported" msgstr "" -#: documents/serialisers.py:1440 +#: documents/serialisers.py:1506 msgid "Invalid variable detected." msgstr "" @@ -1387,62 +1387,66 @@ msgid "Japanese" msgstr "" #: paperless/settings.py:699 -msgid "Luxembourgish" +msgid "Korean" msgstr "" #: paperless/settings.py:700 -msgid "Norwegian" +msgid "Luxembourgish" msgstr "" #: paperless/settings.py:701 -msgid "Dutch" +msgid "Norwegian" msgstr "" #: paperless/settings.py:702 -msgid "Polish" +msgid "Dutch" msgstr "" #: paperless/settings.py:703 -msgid "Portuguese (Brazil)" +msgid "Polish" msgstr "" #: paperless/settings.py:704 -msgid "Portuguese" +msgid "Portuguese (Brazil)" msgstr "" #: paperless/settings.py:705 -msgid "Romanian" +msgid "Portuguese" msgstr "" #: paperless/settings.py:706 -msgid "Russian" +msgid "Romanian" msgstr "" #: paperless/settings.py:707 -msgid "Slovak" +msgid "Russian" msgstr "" #: paperless/settings.py:708 -msgid "Slovenian" +msgid "Slovak" msgstr "" #: paperless/settings.py:709 -msgid "Serbian" +msgid "Slovenian" msgstr "" #: paperless/settings.py:710 -msgid "Swedish" +msgid "Serbian" msgstr "" #: paperless/settings.py:711 -msgid "Turkish" +msgid "Swedish" msgstr "" #: paperless/settings.py:712 -msgid "Ukrainian" +msgid "Turkish" msgstr "" #: paperless/settings.py:713 +msgid "Ukrainian" +msgstr "" + +#: paperless/settings.py:714 msgid "Chinese Simplified" msgstr "" diff --git a/src/paperless/settings.py b/src/paperless/settings.py index 27c609e8d..ee6110732 100644 --- a/src/paperless/settings.py +++ b/src/paperless/settings.py @@ -696,6 +696,7 @@ LANGUAGES = [ ("hu-hu", _("Hungarian")), ("it-it", _("Italian")), ("ja-jp", _("Japanese")), + ("ko-kr", _("Korean")), ("lb-lu", _("Luxembourgish")), ("no-no", _("Norwegian")), ("nl-nl", _("Dutch")), From b8283047ae8bafe98c96a05322dc49bd4cb4fff2 Mon Sep 17 00:00:00 2001 From: shamoon <4887959+shamoon@users.noreply.github.com> Date: Fri, 30 Aug 2024 15:43:08 -0700 Subject: [PATCH 11/32] Fix: saved view sidebar heading not always visible (#7584) --- src-ui/messages.xlf | 90 ++++++++++--------- .../app-frame/app-frame.component.html | 48 +++++----- .../app-frame/app-frame.component.ts | 1 - .../services/rest/saved-view.service.spec.ts | 10 +++ .../app/services/rest/saved-view.service.ts | 24 ++--- 5 files changed, 95 insertions(+), 78 deletions(-) diff --git a/src-ui/messages.xlf b/src-ui/messages.xlf index 71cda881d..161ff52d7 100644 --- a/src-ui/messages.xlf +++ b/src-ui/messages.xlf @@ -358,11 +358,11 @@ src/app/components/app-frame/app-frame.component.html - 235 + 237 src/app/components/app-frame/app-frame.component.html - 237 + 239 @@ -641,11 +641,11 @@ 
src/app/components/app-frame/app-frame.component.html - 270 + 272 src/app/components/app-frame/app-frame.component.html - 273 + 275 @@ -1309,6 +1309,10 @@ src/app/components/app-frame/app-frame.component.html 98 + + src/app/components/app-frame/app-frame.component.html + 103 + Show warning when closing saved views with unsaved changes @@ -1755,7 +1759,7 @@ src/app/components/app-frame/app-frame.component.ts - 126 + 125 @@ -1773,11 +1777,11 @@ src/app/components/app-frame/app-frame.component.html - 258 + 260 src/app/components/app-frame/app-frame.component.html - 260 + 262 @@ -2082,11 +2086,11 @@ src/app/components/app-frame/app-frame.component.html - 218 + 220 src/app/components/app-frame/app-frame.component.html - 221 + 223 @@ -2296,11 +2300,11 @@ src/app/components/app-frame/app-frame.component.html - 249 + 251 src/app/components/app-frame/app-frame.component.html - 251 + 253 @@ -2645,47 +2649,47 @@ src/app/components/app-frame/app-frame.component.html - 279 + 281 src/app/components/app-frame/app-frame.component.html - 282 + 284 Open documents src/app/components/app-frame/app-frame.component.html - 128 + 130 Close all src/app/components/app-frame/app-frame.component.html - 148 + 150 src/app/components/app-frame/app-frame.component.html - 150 + 152 Manage src/app/components/app-frame/app-frame.component.html - 159 + 161 Correspondents src/app/components/app-frame/app-frame.component.html - 165 + 167 src/app/components/app-frame/app-frame.component.html - 167 + 169 src/app/components/dashboard/widgets/statistics-widget/statistics-widget.component.html @@ -2696,11 +2700,11 @@ Tags src/app/components/app-frame/app-frame.component.html - 172 + 174 src/app/components/app-frame/app-frame.component.html - 175 + 177 src/app/components/common/input/tags/tags.component.ts @@ -2731,11 +2735,11 @@ Document Types src/app/components/app-frame/app-frame.component.html - 181 + 183 src/app/components/app-frame/app-frame.component.html - 183 + 185 src/app/components/dashboard/widgets/statistics-widget/statistics-widget.component.html @@ -2746,11 +2750,11 @@ Storage Paths src/app/components/app-frame/app-frame.component.html - 188 + 190 src/app/components/app-frame/app-frame.component.html - 190 + 192 src/app/components/dashboard/widgets/statistics-widget/statistics-widget.component.html @@ -2761,11 +2765,11 @@ Custom Fields src/app/components/app-frame/app-frame.component.html - 195 + 197 src/app/components/app-frame/app-frame.component.html - 197 + 199 src/app/components/common/custom-fields-dropdown/custom-fields-dropdown.component.html @@ -2780,11 +2784,11 @@ Workflows src/app/components/app-frame/app-frame.component.html - 204 + 206 src/app/components/app-frame/app-frame.component.html - 206 + 208 src/app/components/manage/workflows/workflows.component.html @@ -2795,92 +2799,92 @@ Mail src/app/components/app-frame/app-frame.component.html - 211 + 213 src/app/components/app-frame/app-frame.component.html - 214 + 216 Administration src/app/components/app-frame/app-frame.component.html - 229 + 231 Configuration src/app/components/app-frame/app-frame.component.html - 242 + 244 src/app/components/app-frame/app-frame.component.html - 244 + 246 GitHub src/app/components/app-frame/app-frame.component.html - 289 + 291 is available. src/app/components/app-frame/app-frame.component.html - 298,299 + 300,301 Click to view. 
src/app/components/app-frame/app-frame.component.html - 299 + 301 Paperless-ngx can automatically check for updates src/app/components/app-frame/app-frame.component.html - 303 + 305 How does this work? src/app/components/app-frame/app-frame.component.html - 310,312 + 312,314 Update available src/app/components/app-frame/app-frame.component.html - 323 + 325 Sidebar views updated src/app/components/app-frame/app-frame.component.ts - 209 + 208 Error updating sidebar views src/app/components/app-frame/app-frame.component.ts - 212 + 211 An error occurred while saving update checking settings. src/app/components/app-frame/app-frame.component.ts - 233 + 232 diff --git a/src-ui/src/app/components/app-frame/app-frame.component.html b/src-ui/src/app/components/app-frame/app-frame.component.html index 94c046d70..aec850a5d 100644 --- a/src-ui/src/app/components/app-frame/app-frame.component.html +++ b/src-ui/src/app/components/app-frame/app-frame.component.html @@ -93,33 +93,35 @@