Mirror of https://github.com/paperless-ngx/paperless-ngx.git
Synced 2026-04-21 15:29:26 +00:00

Comparing fix/moneta...chore/inde (10 commits)
Commits (author and date columns were not preserved):

- 9ab2b16d9d
- ffaa2bb77a
- 50ec987a81
- f784a74eba
- 814fdf5892
- 58789e5061
- 7492cda794
- fbf4e32646
- 733d873e34
- 5e609101d1
```diff
@@ -94,12 +94,18 @@ main {
 }

 .sidebar.slim:not(.animating) {
   transition: none;

   li.nav-item span,
   .sidebar-heading span {
     display: none;
   }
 }

 .sidebar.slim:not(.animating) ~ main.col-slim {
   transition: none;
 }

 .sidebar.animating {
   li.nav-item span,
   .sidebar-heading span {

@@ -167,7 +167,6 @@ class TaskAdmin(admin.ModelAdmin):
         "wait_time_seconds",
         "input_data",
         "result_data",
-        "result_message",
     )

@@ -17,7 +17,9 @@ from pikepdf import Pdf
 from documents.converters import convert_from_tiff_to_pdf
 from documents.data_models import ConsumableDocument
 from documents.data_models import DocumentMetadataOverrides
+from documents.data_models import DocumentSource
 from documents.models import Document
+from documents.models import PaperlessTask
 from documents.models import Tag
 from documents.plugins.base import ConsumeTaskPlugin
 from documents.plugins.base import StopConsumeTaskError

@@ -193,23 +195,36 @@ class BarcodePlugin(ConsumeTaskPlugin):

         from documents import tasks

+        _SOURCE_TO_TRIGGER: dict[DocumentSource, PaperlessTask.TriggerSource] = {
+            DocumentSource.ConsumeFolder: PaperlessTask.TriggerSource.FOLDER_CONSUME,
+            DocumentSource.ApiUpload: PaperlessTask.TriggerSource.API_UPLOAD,
+            DocumentSource.MailFetch: PaperlessTask.TriggerSource.EMAIL_CONSUME,
+            DocumentSource.WebUI: PaperlessTask.TriggerSource.WEB_UI,
+        }
+        trigger_source = _SOURCE_TO_TRIGGER.get(
+            self.input_doc.source,
+            PaperlessTask.TriggerSource.MANUAL,
+        )
+
         # Create the split document tasks
         for new_document in self.separate_pages(separator_pages):
             copy_file_with_basic_stats(new_document, tmp_dir / new_document.name)

-            task = tasks.consume_file.delay(
-                ConsumableDocument(
-                    # Same source, for templates
-                    source=self.input_doc.source,
-                    mailrule_id=self.input_doc.mailrule_id,
-                    # Can't use same folder or the consume might grab it again
-                    original_file=(tmp_dir / new_document.name).resolve(),
-                    # Adding optional original_path for later uses in
-                    # workflow matching
-                    original_path=self.input_doc.original_file,
-                ),
-                # All the same metadata
-                self.metadata,
+            task = tasks.consume_file.apply_async(
+                kwargs={
+                    "input_doc": ConsumableDocument(
+                        # Same source, for templates
+                        source=self.input_doc.source,
+                        mailrule_id=self.input_doc.mailrule_id,
+                        # Can't use same folder or the consume might grab it again
+                        original_file=(tmp_dir / new_document.name).resolve(),
+                        # Adding optional original_path for later uses in
+                        # workflow matching
+                        original_path=self.input_doc.original_file,
+                    ),
+                    "overrides": self.metadata,
+                },
+                headers={"trigger_source": trigger_source},
             )
             logger.info(f"Created new task {task.id} for {new_document.name}")
```
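The changeset's core pattern appears here for the first time: `Task.delay(*args)` becomes `Task.apply_async(kwargs=..., headers=...)`. `delay` is shorthand that cannot attach message headers, while `apply_async` can, and those headers travel with the task message where the `before_task_publish` signal (see the handler changes further down) can read them without inspecting the task body. A minimal sketch of the sending side, with an illustrative task and header key rather than the real `consume_file`/`trigger_source` pair:

```python
# Minimal sketch of the sending pattern; `record_metadata` and the
# "origin" header are illustrative stand-ins, not paperless-ngx code.
from celery import Celery

app = Celery("sketch", broker="memory://")


@app.task
def record_metadata(payload: dict) -> dict:
    return payload


# .delay(x) is only shorthand for .apply_async(args=(x,)):
# there is no way to pass message headers through it.
record_metadata.delay({"id": 1})

# .apply_async takes explicit task kwargs plus message headers, which are
# visible to before_task_publish receivers at publish time.
record_metadata.apply_async(
    kwargs={"payload": {"id": 1}},
    headers={"origin": "folder_consume"},
)
```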
```diff
@@ -22,6 +22,7 @@ from documents.models import CustomField
 from documents.models import CustomFieldInstance
 from documents.models import Document
 from documents.models import DocumentType
+from documents.models import PaperlessTask
 from documents.models import StoragePath
 from documents.models import Tag
 from documents.permissions import set_permissions_for_object

@@ -113,7 +114,10 @@ def set_correspondent(
     affected_docs = list(qs.values_list("pk", flat=True))
     qs.update(correspondent=correspondent)

-    bulk_update_documents.delay(document_ids=affected_docs)
+    bulk_update_documents.apply_async(
+        kwargs={"document_ids": affected_docs},
+        headers={"trigger_source": PaperlessTask.TriggerSource.SYSTEM},
+    )

     return "OK"

@@ -132,8 +136,9 @@ def set_storage_path(doc_ids: list[int], storage_path: StoragePath) -> Literal["
     affected_docs = list(qs.values_list("pk", flat=True))
     qs.update(storage_path=storage_path)

-    bulk_update_documents.delay(
-        document_ids=affected_docs,
+    bulk_update_documents.apply_async(
+        kwargs={"document_ids": affected_docs},
+        headers={"trigger_source": PaperlessTask.TriggerSource.SYSTEM},
     )

     return "OK"

@@ -151,7 +156,10 @@ def set_document_type(doc_ids: list[int], document_type: DocumentType) -> Litera
     affected_docs = list(qs.values_list("pk", flat=True))
     qs.update(document_type=document_type)

-    bulk_update_documents.delay(document_ids=affected_docs)
+    bulk_update_documents.apply_async(
+        kwargs={"document_ids": affected_docs},
+        headers={"trigger_source": PaperlessTask.TriggerSource.SYSTEM},
+    )

     return "OK"

@@ -177,7 +185,10 @@ def add_tag(doc_ids: list[int], tag: int) -> Literal["OK"]:
     DocumentTagRelationship.objects.bulk_create(to_create)

     if affected_docs:
-        bulk_update_documents.delay(document_ids=list(affected_docs))
+        bulk_update_documents.apply_async(
+            kwargs={"document_ids": list(affected_docs)},
+            headers={"trigger_source": PaperlessTask.TriggerSource.SYSTEM},
+        )

     return "OK"

@@ -195,7 +206,10 @@ def remove_tag(doc_ids: list[int], tag: int) -> Literal["OK"]:
     qs.delete()

     if affected_docs:
-        bulk_update_documents.delay(document_ids=affected_docs)
+        bulk_update_documents.apply_async(
+            kwargs={"document_ids": affected_docs},
+            headers={"trigger_source": PaperlessTask.TriggerSource.SYSTEM},
+        )

     return "OK"

@@ -254,7 +268,10 @@ def modify_tags(
         )

         if affected_docs:
-            bulk_update_documents.delay(document_ids=affected_docs)
+            bulk_update_documents.apply_async(
+                kwargs={"document_ids": affected_docs},
+                headers={"trigger_source": PaperlessTask.TriggerSource.SYSTEM},
+            )
     except Exception as e:
         logger.error(f"Error modifying tags: {e}")
         return "ERROR"

@@ -326,7 +343,10 @@ def modify_custom_fields(
             field_id__in=remove_custom_fields,
         ).hard_delete()

-    bulk_update_documents.delay(document_ids=affected_docs)
+    bulk_update_documents.apply_async(
+        kwargs={"document_ids": affected_docs},
+        headers={"trigger_source": PaperlessTask.TriggerSource.SYSTEM},
+    )

     return "OK"

@@ -369,8 +389,9 @@ def delete(doc_ids: list[int]) -> Literal["OK"]:

 def reprocess(doc_ids: list[int]) -> Literal["OK"]:
     for document_id in doc_ids:
-        update_document_content_maybe_archive_file.delay(
-            document_id=document_id,
+        update_document_content_maybe_archive_file.apply_async(
+            kwargs={"document_id": document_id},
+            headers={"trigger_source": PaperlessTask.TriggerSource.MANUAL},
         )

     return "OK"

@@ -396,7 +417,10 @@ def set_permissions(

     affected_docs = list(qs.values_list("pk", flat=True))

-    bulk_update_documents.delay(document_ids=affected_docs)
+    bulk_update_documents.apply_async(
+        kwargs={"document_ids": affected_docs},
+        headers={"trigger_source": PaperlessTask.TriggerSource.SYSTEM},
+    )

     return "OK"

@@ -407,6 +431,7 @@ def rotate(
     *,
     source_mode: SourceMode = SourceModeChoices.LATEST_VERSION,
     user: User | None = None,
+    trigger_source: PaperlessTask.TriggerSource = PaperlessTask.TriggerSource.WEB_UI,
 ) -> Literal["OK"]:
     logger.info(
         f"Attempting to rotate {len(doc_ids)} documents by {degrees} degrees.",

@@ -453,13 +478,16 @@ def rotate(
         if user is not None:
             overrides.actor_id = user.id

-        consume_file.delay(
-            ConsumableDocument(
-                source=DocumentSource.ConsumeFolder,
-                original_file=filepath,
-                root_document_id=root_doc.id,
-            ),
-            overrides,
+        consume_file.apply_async(
+            kwargs={
+                "input_doc": ConsumableDocument(
+                    source=DocumentSource.ConsumeFolder,
+                    original_file=filepath,
+                    root_document_id=root_doc.id,
+                ),
+                "overrides": overrides,
+            },
+            headers={"trigger_source": trigger_source},
         )
         logger.info(
             f"Queued new rotated version for document {root_doc.id} by {degrees} degrees",

@@ -478,6 +506,7 @@ def merge(
     archive_fallback: bool = False,
     source_mode: SourceMode = SourceModeChoices.LATEST_VERSION,
     user: User | None = None,
+    trigger_source: PaperlessTask.TriggerSource = PaperlessTask.TriggerSource.WEB_UI,
 ) -> Literal["OK"]:
     logger.info(
         f"Attempting to merge {len(doc_ids)} documents into a single document.",

@@ -556,12 +585,12 @@ def merge(
     logger.info("Adding merged document to the task queue.")

     consume_task = consume_file.s(
-        ConsumableDocument(
+        input_doc=ConsumableDocument(
             source=DocumentSource.ConsumeFolder,
             original_file=filepath,
         ),
-        overrides,
-    )
+        overrides=overrides,
+    ).set(headers={"trigger_source": trigger_source})

     if delete_originals:
         backup = release_archive_serial_numbers(affected_docs)

@@ -577,7 +606,7 @@ def merge(
             restore_archive_serial_numbers(backup)
             raise
     else:
-        consume_task.delay()
+        consume_task.apply_async()

     return "OK"
```
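Where dispatch has to wait (the merge first releases archive serial numbers and may need to restore them on failure), the code builds a signature with `consume_file.s(...)` and attaches the header through `.set(...)`, so the eventual `apply_async()` needs no extra arguments. A hedged sketch of the same idea, reusing the illustrative task from the earlier sketch:

```python
# Sketch: attaching message options to a Celery signature for deferred
# dispatch. `record_metadata` is the illustrative task from the sketch above.
sig = record_metadata.s(payload={"id": 2}).set(
    headers={"origin": "manual"},
)

# ... preparatory work that must succeed before dispatch ...

sig.apply_async()  # the headers set above travel with the message
```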
```diff
@@ -589,6 +618,7 @@ def split(
     delete_originals: bool = False,
     source_mode: SourceMode = SourceModeChoices.LATEST_VERSION,
     user: User | None = None,
+    trigger_source: PaperlessTask.TriggerSource = PaperlessTask.TriggerSource.WEB_UI,
 ) -> Literal["OK"]:
     logger.info(
         f"Attempting to split document {doc_ids[0]} into {len(pages)} documents",

@@ -631,12 +661,12 @@ def split(
         )
         consume_tasks.append(
             consume_file.s(
-                ConsumableDocument(
+                input_doc=ConsumableDocument(
                     source=DocumentSource.ConsumeFolder,
                     original_file=filepath,
                 ),
-                overrides,
-            ),
+                overrides=overrides,
+            ).set(headers={"trigger_source": trigger_source}),
         )

     if delete_originals:

@@ -669,6 +699,7 @@ def delete_pages(
     *,
     source_mode: SourceMode = SourceModeChoices.LATEST_VERSION,
     user: User | None = None,
+    trigger_source: PaperlessTask.TriggerSource = PaperlessTask.TriggerSource.WEB_UI,
 ) -> Literal["OK"]:
     logger.info(
         f"Attempting to delete pages {pages} from {len(doc_ids)} documents",

@@ -698,13 +729,16 @@ def delete_pages(
     overrides = DocumentMetadataOverrides().from_document(root_doc)
     if user is not None:
         overrides.actor_id = user.id
-    consume_file.delay(
-        ConsumableDocument(
-            source=DocumentSource.ConsumeFolder,
-            original_file=filepath,
-            root_document_id=root_doc.id,
-        ),
-        overrides,
+    consume_file.apply_async(
+        kwargs={
+            "input_doc": ConsumableDocument(
+                source=DocumentSource.ConsumeFolder,
+                original_file=filepath,
+                root_document_id=root_doc.id,
+            ),
+            "overrides": overrides,
+        },
+        headers={"trigger_source": trigger_source},
     )
     logger.info(
         f"Queued new version for document {root_doc.id} after deleting pages {pages}",

@@ -724,6 +758,7 @@ def edit_pdf(
     include_metadata: bool = True,
     source_mode: SourceMode = SourceModeChoices.LATEST_VERSION,
     user: User | None = None,
+    trigger_source: PaperlessTask.TriggerSource = PaperlessTask.TriggerSource.WEB_UI,
 ) -> Literal["OK"]:
     """
     Operations is a list of dictionaries describing the final PDF pages.

@@ -781,13 +816,16 @@ def edit_pdf(
         if user is not None:
             overrides.owner_id = user.id
             overrides.actor_id = user.id
-        consume_file.delay(
-            ConsumableDocument(
-                source=DocumentSource.ConsumeFolder,
-                original_file=filepath,
-                root_document_id=root_doc.id,
-            ),
-            overrides,
+        consume_file.apply_async(
+            kwargs={
+                "input_doc": ConsumableDocument(
+                    source=DocumentSource.ConsumeFolder,
+                    original_file=filepath,
+                    root_document_id=root_doc.id,
+                ),
+                "overrides": overrides,
+            },
+            headers={"trigger_source": trigger_source},
         )
     else:
         consume_tasks = []

@@ -812,12 +850,12 @@ def edit_pdf(
             pdf.save(version_filepath)
             consume_tasks.append(
                 consume_file.s(
-                    ConsumableDocument(
+                    input_doc=ConsumableDocument(
                         source=DocumentSource.ConsumeFolder,
                         original_file=version_filepath,
                     ),
-                    overrides,
-                ),
+                    overrides=overrides,
+                ).set(headers={"trigger_source": trigger_source}),
             )

         if delete_original:

@@ -853,6 +891,7 @@ def remove_password(
     include_metadata: bool = True,
     source_mode: SourceMode = SourceModeChoices.LATEST_VERSION,
     user: User | None = None,
+    trigger_source: PaperlessTask.TriggerSource = PaperlessTask.TriggerSource.WEB_UI,
 ) -> Literal["OK"]:
     """
     Remove password protection from PDF documents.

@@ -887,13 +926,16 @@ def remove_password(
         if user is not None:
             overrides.owner_id = user.id
             overrides.actor_id = user.id
-        consume_file.delay(
-            ConsumableDocument(
-                source=DocumentSource.ConsumeFolder,
-                original_file=filepath,
-                root_document_id=root_doc.id,
-            ),
-            overrides,
+        consume_file.apply_async(
+            kwargs={
+                "input_doc": ConsumableDocument(
+                    source=DocumentSource.ConsumeFolder,
+                    original_file=filepath,
+                    root_document_id=root_doc.id,
+                ),
+                "overrides": overrides,
+            },
+            headers={"trigger_source": trigger_source},
         )
     else:
         consume_tasks = []

@@ -908,12 +950,12 @@ def remove_password(

         consume_tasks.append(
             consume_file.s(
-                ConsumableDocument(
+                input_doc=ConsumableDocument(
                     source=DocumentSource.ConsumeFolder,
                     original_file=filepath,
                 ),
-                overrides,
-            ),
+                overrides=overrides,
+            ).set(headers={"trigger_source": trigger_source}),
         )

         if delete_original:

@@ -20,6 +20,7 @@ from rest_framework.reverse import reverse

 from documents.classifier import load_classifier
 from documents.data_models import ConsumableDocument
+from documents.data_models import ConsumeFileSuccessResult
 from documents.data_models import DocumentMetadataOverrides
 from documents.file_handling import create_source_path_directory
 from documents.file_handling import generate_filename

@@ -90,6 +91,15 @@ class ConsumerError(Exception):
     pass


+class ConsumeFileDuplicateError(ConsumerError):
+    """Raised when a file is rejected because it duplicates an existing document."""
+
+    def __init__(self, message: str, duplicate_id: int, *, in_trash: bool) -> None:
+        super().__init__(message)
+        self.duplicate_id = duplicate_id
+        self.in_trash = in_trash
+
+
 class ConsumerStatusShortMessage(StrEnum):
     DOCUMENT_ALREADY_EXISTS = "document_already_exists"
     DOCUMENT_ALREADY_EXISTS_IN_TRASH = "document_already_exists_in_trash"

@@ -395,7 +405,7 @@ class ConsumerPlugin(
                 exception=e,
             )

-    def run(self) -> str:
+    def run(self) -> "ConsumeFileSuccessResult":
         """
         Return the document object if it was successfully created.
         """

@@ -771,7 +781,7 @@ class ConsumerPlugin(
         # Return the most up to date fields
         document.refresh_from_db()

-        return f"Success. New document id {document.pk} created"
+        return ConsumeFileSuccessResult(document_id=document.pk)

     def _parse_title_placeholders(self, title: str) -> str:
         local_added = timezone.localtime(timezone.now())

@@ -1010,9 +1020,13 @@ class ConsumerPreflightPlugin(
             )
             failure_msg += " Note: existing document is in the trash."

-        self._fail(
-            status_msg,
-            failure_msg,
+        self._send_progress(100, 100, ProgressStatusOptions.FAILED, status_msg)
+        self.log.error(failure_msg)
+        in_trash = duplicates_in_trash.exists()
+        raise ConsumeFileDuplicateError(
+            f"{self.filename}: {failure_msg}",
+            duplicate.pk,
+            in_trash=in_trash,
         )

     def pre_check_directories(self) -> None:

@@ -2,6 +2,7 @@ import dataclasses
 import datetime
 from enum import IntEnum
 from pathlib import Path
+from typing import TypedDict

 import magic
 from guardian.shortcuts import get_groups_with_perms

@@ -184,3 +185,26 @@ class ConsumableDocument:
         # Get the file type once at init
         # Note this function isn't called when the object is unpickled
         self.mime_type = magic.from_file(self.original_file, mime=True)
+
+
+class ConsumeFileDuplicateResult(TypedDict):
+    """Returned by consume_file when the file is rejected as a duplicate."""
+
+    duplicate_of: int
+    duplicate_in_trash: bool
+
+
+class ConsumeFileSuccessResult(TypedDict):
+    """Returned by consume_file when the document is created successfully."""
+
+    document_id: int
+
+
+class ConsumeFileStoppedResult(TypedDict):
+    """Returned by consume_file when a plugin raises StopConsumeTaskError.
+
+    Examples: barcode split dispatched child tasks, double-sided scan waiting
+    for the second half, workflow deleted the document during consumption.
+    """
+
+    reason: str
```
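Since the three result shapes share no keys, downstream code can discriminate purely on key presence, which is exactly what the updated signal handlers and v9 serializer further down do. A minimal sketch of that dispatch (the `describe_result` helper is illustrative, not part of the change):

```python
# Sketch: discriminating consume_file's TypedDict results by key presence.
# `describe_result` is an illustrative helper, not paperless-ngx code.
def describe_result(result: dict | None) -> str:
    if result is None:
        return "no result recorded"
    if "document_id" in result:
        return f"created document {result['document_id']}"
    if "duplicate_of" in result:
        suffix = " (in trash)" if result.get("duplicate_in_trash") else ""
        return f"duplicate of document {result['duplicate_of']}{suffix}"
    return f"stopped: {result['reason']}"


assert describe_result({"document_id": 42}) == "created document 42"
assert describe_result({"duplicate_of": 7, "duplicate_in_trash": False}).startswith("duplicate")
```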
```diff
@@ -6,8 +6,6 @@ import json
 import logging
 import operator
 from contextlib import contextmanager
-from decimal import Decimal
-from decimal import InvalidOperation
 from typing import TYPE_CHECKING
 from typing import Any

@@ -293,34 +291,6 @@ class MimeTypeFilter(Filter):
         return qs


-class MonetaryAmountField(serializers.Field):
-    """
-    Accepts either a plain decimal string ("100", "100.00") or a currency-prefixed
-    string ("USD100.00") and returns the numeric amount as a Decimal.
-
-    Mirrors the logic of the value_monetary_amount generated field: if the value
-    starts with a non-digit, the first 3 characters are treated as a currency code
-    (ISO 4217) and stripped before parsing. This preserves backwards compatibility
-    with saved views that stored a currency-prefixed string as the filter value.
-    """
-
-    default_error_messages = {"invalid": "A valid number is required."}
-
-    def to_internal_value(self, data):
-        if not isinstance(data, str | int | float):
-            self.fail("invalid")
-        value = str(data).strip()
-        if value and not value[0].isdigit() and value[0] != "-":
-            value = value[3:]  # strip 3-char ISO 4217 currency code
-        try:
-            return Decimal(value)
-        except InvalidOperation:
-            self.fail("invalid")
-
-    def to_representation(self, value):
-        return str(value)
-
-
 class SelectField(serializers.CharField):
     def __init__(self, custom_field: CustomField) -> None:
         self._options = custom_field.extra_data["select_options"]
```
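For reference, the parsing behavior being removed, restated outside the DRF field as a self-contained sketch of the same logic (not the original code):

```python
# Sketch of the removed MonetaryAmountField's parsing rule: a leading
# non-digit, non-minus character means a 3-char ISO 4217 prefix to strip.
from decimal import Decimal, InvalidOperation


def parse_monetary(data: str | int | float) -> Decimal | None:
    value = str(data).strip()
    if value and not value[0].isdigit() and value[0] != "-":
        value = value[3:]  # e.g. "USD100.00" -> "100.00"
    try:
        return Decimal(value)
    except InvalidOperation:
        return None


assert parse_monetary("USD100.00") == Decimal("100.00")
assert parse_monetary("100") == Decimal("100")
assert parse_monetary("not-money") is None
```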
```diff
@@ -546,8 +516,9 @@ class CustomFieldQueryParser:
         value_field_name = CustomFieldInstance.get_value_field_name(
             custom_field.data_type,
         )
-        if custom_field.data_type == CustomField.FieldDataType.MONETARY and (
-            op in self.EXPR_BY_CATEGORY["arithmetic"] or op in {"exact", "in"}
+        if (
+            custom_field.data_type == CustomField.FieldDataType.MONETARY
+            and op in self.EXPR_BY_CATEGORY["arithmetic"]
         ):
             value_field_name = "value_monetary_amount"
         has_field = Q(custom_fields__field=custom_field)

@@ -657,13 +628,6 @@ class CustomFieldQueryParser:
         elif custom_field.data_type == CustomField.FieldDataType.URL:
             # For URL fields we don't need to be strict about validation (e.g., for istartswith).
             field = serializers.CharField()
-        elif custom_field.data_type == CustomField.FieldDataType.MONETARY and (
-            op in self.EXPR_BY_CATEGORY["arithmetic"] or op in {"exact", "in"}
-        ):
-            # These ops compare against value_monetary_amount (a DecimalField).
-            # MonetaryAmountField accepts both "100" and "USD100.00" for backwards
-            # compatibility with saved views that stored currency-prefixed values.
-            field = MonetaryAmountField()
         else:
             # The general case: inferred from the corresponding field in CustomFieldInstance.
             value_field_name = CustomFieldInstance.get_value_field_name(

@@ -27,6 +27,7 @@ from watchfiles import watch
 from documents.data_models import ConsumableDocument
 from documents.data_models import DocumentMetadataOverrides
 from documents.data_models import DocumentSource
+from documents.models import PaperlessTask
 from documents.models import Tag
 from documents.parsers import get_supported_file_extensions
 from documents.tasks import consume_file

@@ -338,12 +339,15 @@ def _consume_file(
     # Queue for consumption
     try:
         logger.info(f"Adding {filepath} to the task queue")
-        consume_file.delay(
-            ConsumableDocument(
-                source=DocumentSource.ConsumeFolder,
-                original_file=filepath,
-            ),
-            DocumentMetadataOverrides(tag_ids=tag_ids),
+        consume_file.apply_async(
+            kwargs={
+                "input_doc": ConsumableDocument(
+                    source=DocumentSource.ConsumeFolder,
+                    original_file=filepath,
+                ),
+                "overrides": DocumentMetadataOverrides(tag_ids=tag_ids),
+            },
+            headers={"trigger_source": PaperlessTask.TriggerSource.FOLDER_CONSUME},
         )
     except Exception:
         logger.exception(f"Error while queuing document {filepath}")

@@ -170,15 +170,6 @@ class Migration(migrations.Migration):
                         verbose_name="Result Data",
                     ),
                 ),
-                (
-                    "result_message",
-                    models.TextField(
-                        blank=True,
-                        help_text="Human-readable result message",
-                        null=True,
-                        verbose_name="Result Message",
-                    ),
-                ),
                 (
                     "acknowledged",
                     models.BooleanField(

@@ -790,13 +790,6 @@ class PaperlessTask(ModelWithOwner):
         help_text=_("Structured result data from task execution"),
     )

-    result_message = models.TextField(
-        null=True,
-        blank=True,
-        verbose_name=_("Result Message"),
-        help_text=_("Human-readable result message"),
-    )
-
     # Acknowledgment
     acknowledged = models.BooleanField(
         default=False,

@@ -21,6 +21,7 @@ from guardian.shortcuts import get_users_with_perms

 from documents.search._normalize import ascii_fold
 from documents.search._query import build_permission_filter
+from documents.search._query import parse_simple_text_highlight_query
 from documents.search._query import parse_simple_text_query
 from documents.search._query import parse_simple_title_query
 from documents.search._query import parse_user_query

@@ -549,6 +550,9 @@ class TantivyBackend:

         self._ensure_open()
         user_query = self._parse_query(query, search_mode)
+        highlight_query = user_query
+        if search_mode is SearchMode.TEXT:
+            highlight_query = parse_simple_text_highlight_query(self._index, query)

         # For notes_text snippet generation, we need a query that targets the
         # notes_text field directly. user_query may contain JSON-field terms

@@ -601,7 +605,7 @@ class TantivyBackend:
             if snippet_generator is None:
                 snippet_generator = tantivy.SnippetGenerator.create(
                     searcher,
-                    user_query,
+                    highlight_query,
                     self._schema,
                     "content",
                 )

@@ -610,7 +614,7 @@ class TantivyBackend:
             if content_html:
                 highlights["content"] = content_html

-            if "notes_text" in doc_dict:
+            if search_mode is SearchMode.QUERY and "notes_text" in doc_dict:
                 # Use notes_text (plain text) for snippet generation — tantivy's
                 # SnippetGenerator does not support JSON fields.
                 if notes_snippet_generator is None:

@@ -490,6 +490,14 @@ _FIELD_BOOSTS = {"title": 2.0}
 _SIMPLE_FIELD_BOOSTS = {"simple_title": 2.0}


+def _simple_query_tokens(raw_query: str) -> list[str]:
+    tokens = [
+        ascii_fold(token.lower())
+        for token in _SIMPLE_QUERY_TOKEN_RE.findall(raw_query, timeout=_REGEX_TIMEOUT)
+    ]
+    return [token for token in tokens if token]
+
+
 def _build_simple_field_query(
     index: tantivy.Index,
     field: str,

@@ -585,11 +593,7 @@ def parse_simple_query(

     Query string is escaped and normalized to be treated as "simple" text query.
     """
-    tokens = [
-        ascii_fold(token.lower())
-        for token in _SIMPLE_QUERY_TOKEN_RE.findall(raw_query, timeout=_REGEX_TIMEOUT)
-    ]
-    tokens = [token for token in tokens if token]
+    tokens = _simple_query_tokens(raw_query)
     if not tokens:
         return tantivy.Query.empty_query()

@@ -602,6 +606,23 @@ def parse_simple_query(
     return tantivy.Query.boolean_query(field_queries)


+def parse_simple_text_highlight_query(
+    index: tantivy.Index,
+    raw_query: str,
+) -> tantivy.Query:
+    """Build a snippet-friendly query for simple text searches.
+
+    Simple search matching uses regex queries but for compatibility with Tantivy
+    SnippetGenerator we build a plain term query over the content field instead.
+    """
+    tokens = _simple_query_tokens(raw_query)
+    if not tokens:
+        return tantivy.Query.empty_query()
+
+    return index.parse_query(" ".join(tokens), ["content"])
+
+
 def parse_simple_text_query(
     index: tantivy.Index,
     raw_query: str,
```
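The new helper exists because the regex queries that drive simple-search matching cannot feed tantivy's `SnippetGenerator`, so highlighting gets a plain parsed query over `content` only. A hedged usage sketch, assuming `index` is an opened `tantivy.Index` and `schema` is the `tantivy.Schema` it was built with:

```python
# Sketch: match with the regular simple-text query, highlight with the
# snippet-friendly variant; mirrors the backend's own call shape above.
match_query = parse_simple_text_query(index, "quick fox")
highlight_query = parse_simple_text_highlight_query(index, "quick fox")

searcher = index.searcher()
snippet_generator = tantivy.SnippetGenerator.create(
    searcher,
    highlight_query,
    schema,
    "content",
)
```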
```diff
@@ -1,5 +1,6 @@
 from __future__ import annotations

+import json
 import logging
 import shutil
 from typing import TYPE_CHECKING

@@ -100,9 +101,9 @@ def needs_rebuild(index_dir: Path) -> bool:
     """
     Check if the search index needs rebuilding.

-    Compares the current schema version and search language configuration
-    against sentinel files to determine if the index is compatible with
-    the current paperless-ngx version and settings.
+    Reads .index_settings.json to compare the stored schema version and
+    search language against the current configuration. Returns True if the
+    file is missing, unparsable, or either value mismatches.

     Args:
         index_dir: Path to the search index directory

@@ -110,24 +111,19 @@ def needs_rebuild(index_dir: Path) -> bool:
     Returns:
         True if the index needs rebuilding, False if it's up to date
     """
-    version_file = index_dir / ".schema_version"
-    if not version_file.exists():
+    settings_file = index_dir / ".index_settings.json"
+    if not settings_file.exists():
         return True
     try:
-        if int(version_file.read_text().strip()) != SCHEMA_VERSION:
+        data = json.loads(settings_file.read_text())
+        if data.get("schema_version") != SCHEMA_VERSION:
             logger.info("Search index schema version mismatch - rebuilding.")
             return True
+        if "language" not in data or data["language"] != settings.SEARCH_LANGUAGE:
+            logger.info("Search index language changed - rebuilding.")
+            return True
     except ValueError:
         return True

-    language_file = index_dir / ".schema_language"
-    if not language_file.exists():
-        logger.info("Search index language sentinel missing - rebuilding.")
-        return True
-    if language_file.read_text().strip() != (settings.SEARCH_LANGUAGE or ""):
-        logger.info("Search index language changed - rebuilding.")
-        return True
-
     return False

@@ -149,9 +145,16 @@ def wipe_index(index_dir: Path) -> None:


 def _write_sentinels(index_dir: Path) -> None:
-    """Write schema version and language sentinel files so the next index open can skip rebuilding."""
-    (index_dir / ".schema_version").write_text(str(SCHEMA_VERSION))
-    (index_dir / ".schema_language").write_text(settings.SEARCH_LANGUAGE or "")
+    """Write .index_settings.json so the next index open can skip rebuilding."""
+    settings_file = index_dir / ".index_settings.json"
+    settings_file.write_text(
+        json.dumps(
+            {
+                "schema_version": SCHEMA_VERSION,
+                "language": settings.SEARCH_LANGUAGE,
+            },
+        ),
+    )


 def open_or_rebuild_index(index_dir: Path | None = None) -> tantivy.Index:
```
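The two plain-text sentinels collapse into one JSON file, which also lets the code distinguish "language key missing" from "language set to None". For an index built with `SEARCH_LANGUAGE = "en"` the file would hold something like `{"schema_version": 12, "language": "en"}` (the version number here is illustrative). A round-trip sketch:

```python
# Sketch of the single-sentinel round trip; the path and schema version
# value are illustrative, not taken from the codebase.
import json
from pathlib import Path

index_dir = Path("/tmp/search-index")
index_dir.mkdir(parents=True, exist_ok=True)

sentinel = index_dir / ".index_settings.json"
sentinel.write_text(json.dumps({"schema_version": 12, "language": "en"}))

data = json.loads(sentinel.read_text())
needs_rebuild = data.get("schema_version") != 12 or data.get("language") != "en"
assert needs_rebuild is False
```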
```diff
@@ -1604,6 +1604,7 @@ class RotateDocumentsSerializer(DocumentSelectionSerializer, SourceModeValidatio
         required=False,
         default=bulk_edit.SourceModeChoices.LATEST_VERSION,
     )
+    from_webui = serializers.BooleanField(required=False, default=False)


 class MergeDocumentsSerializer(DocumentListSerializer, SourceModeValidationMixin):

@@ -1617,6 +1618,7 @@ class MergeDocumentsSerializer(DocumentListSerializer, SourceModeValidationMixin
         required=False,
         default=bulk_edit.SourceModeChoices.LATEST_VERSION,
     )
+    from_webui = serializers.BooleanField(required=False, default=False)


 class EditPdfDocumentsSerializer(DocumentListSerializer, SourceModeValidationMixin):

@@ -1628,6 +1630,7 @@ class EditPdfDocumentsSerializer(DocumentListSerializer, SourceModeValidationMix
         required=False,
         default=bulk_edit.SourceModeChoices.LATEST_VERSION,
     )
+    from_webui = serializers.BooleanField(required=False, default=False)

     def validate(self, attrs):
         documents = attrs["documents"]

@@ -1679,6 +1682,7 @@ class RemovePasswordDocumentsSerializer(
         required=False,
         default=bulk_edit.SourceModeChoices.LATEST_VERSION,
     )
+    from_webui = serializers.BooleanField(required=False, default=False)


 class DeleteDocumentsSerializer(DocumentSelectionSerializer):

@@ -1726,6 +1730,7 @@ class BulkEditSerializer(
     )

     parameters = serializers.DictField(allow_empty=True, default={}, write_only=True)
+    from_webui = serializers.BooleanField(required=False, default=False)

     def _validate_tag_id_list(self, tags, name="tags") -> None:
         if not isinstance(tags, list):

@@ -2398,7 +2403,10 @@ class StoragePathSerializer(MatchingModelSerializer, OwnedObjectSerializer):
         """
         doc_ids = [doc.id for doc in instance.documents.all()]
         if doc_ids:
-            bulk_edit.bulk_update_documents.delay(doc_ids)
+            bulk_edit.bulk_update_documents.apply_async(
+                kwargs={"document_ids": doc_ids},
+                headers={"trigger_source": PaperlessTask.TriggerSource.SYSTEM},
+            )

         return super().update(instance, validated_data)

@@ -2469,7 +2477,6 @@ class TaskSerializerV10(OwnedObjectSerializer):
             "wait_time_seconds",
             "input_data",
             "result_data",
-            "result_message",
             "related_document_ids",
             "acknowledged",
             "owner",

@@ -2496,12 +2503,8 @@ class TaskSerializerV9(serializers.ModelSerializer):
     # v9 field: status -> uppercase Celery state strings
     status = serializers.SerializerMethodField()

-    # v9 field: result -> result_message (with legacy format fallback)
-    result = serializers.CharField(
-        source="result_message",
-        read_only=True,
-        allow_null=True,
-    )
+    # v9 field: result -> derived from result_data
+    result = serializers.SerializerMethodField()

     # v9 field: related_document -> first document ID from result_data
     related_document = serializers.SerializerMethodField()

@@ -2533,6 +2536,20 @@ class TaskSerializerV9(serializers.ModelSerializer):
         PaperlessTask.TaskType.LLM_INDEX: "llmindex_update",
     }

+    def get_result(self, obj: PaperlessTask) -> str | None:
+        """Reconstruct a human-readable result string from result_data for v9 clients."""
+        if not obj.result_data:
+            return None
+        if doc_id := obj.result_data.get("document_id"):
+            return f"Success. New document id {doc_id} created"
+        if reason := obj.result_data.get("reason"):
+            return reason
+        if dup_id := obj.result_data.get("duplicate_of"):
+            return f"Not consuming: It is a duplicate of document #{dup_id}"
+        if error := obj.result_data.get("error_message"):
+            return error
+        return None
+
     def get_task_name(self, obj: PaperlessTask) -> str:
         return self._TASK_TYPE_TO_V9_NAME.get(obj.task_type, obj.task_type)
```
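As a worked example of that fallback chain: `{"document_id": 42}` renders as "Success. New document id 42 created", `{"duplicate_of": 7}` as "Not consuming: It is a duplicate of document #7", a stopped task's `{"reason": ...}` passes through unchanged, and empty or missing `result_data` yields `None`, so v9 clients keep seeing the same strings the old string-returning task produced.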
```diff
@@ -3,7 +3,6 @@ from __future__ import annotations
 import datetime
 import hashlib
 import logging
-import re as _re
 import shutil
 import traceback as _tb
 from pathlib import Path

@@ -34,7 +33,6 @@ from documents import matching
 from documents.caching import clear_document_caches
 from documents.caching import invalidate_llm_suggestions_cache
 from documents.data_models import ConsumableDocument
-from documents.data_models import DocumentSource
 from documents.file_handling import create_source_path_directory
 from documents.file_handling import delete_empty_directories
 from documents.file_handling import generate_filename

@@ -709,7 +707,7 @@ def check_paths_and_prune_custom_fields(
         and instance.fields.count() > 0
         and instance.extra_data
     ):  # Only select fields, for now
-        process_cf_select_update.delay(instance)
+        process_cf_select_update.apply_async(kwargs={"custom_field": instance})


 @receiver(models.signals.post_delete, sender=CustomField)

@@ -1024,27 +1022,9 @@ _CELERY_STATE_TO_STATUS: dict[str, PaperlessTask.Status] = {
     "REVOKED": PaperlessTask.Status.REVOKED,
 }

-_DOCUMENT_SOURCE_TO_TRIGGER: dict[DocumentSource, PaperlessTask.TriggerSource] = {
-    DocumentSource.ConsumeFolder: PaperlessTask.TriggerSource.FOLDER_CONSUME,
-    DocumentSource.ApiUpload: PaperlessTask.TriggerSource.API_UPLOAD,
-    DocumentSource.MailFetch: PaperlessTask.TriggerSource.EMAIL_CONSUME,
-    DocumentSource.WebUI: PaperlessTask.TriggerSource.WEB_UI,
-}
-
-
-def _get_consume_args(
-    args: tuple,
-    task_kwargs: dict,
-) -> tuple[Any | None, Any | None]:
-    """Extract (input_doc, overrides) from consume_file task arguments."""
-    input_doc = args[0] if args else task_kwargs.get("input_doc")
-    overrides = args[1] if len(args) >= 2 else task_kwargs.get("overrides")
-    return input_doc, overrides
-
-
 def _extract_input_data(
     task_type: PaperlessTask.TaskType,
-    args: tuple,
     task_kwargs: dict,
 ) -> dict:
     """Build the input_data dict stored on the PaperlessTask record.

@@ -1055,8 +1035,9 @@ def _extract_input_data(
     types store no input data and return {}.
     """
     if task_type == PaperlessTask.TaskType.CONSUME_FILE:
-        input_doc, overrides = _get_consume_args(args, task_kwargs)
-        if input_doc is None:  # pragma: no cover
+        input_doc = task_kwargs.get("input_doc")
+        overrides = task_kwargs.get("overrides")
+        if input_doc is None:
             return {}
         data: dict = {
             "filename": input_doc.original_file.name,

@@ -1081,7 +1062,7 @@ def _extract_input_data(
         return data

     if task_type == PaperlessTask.TaskType.MAIL_FETCH:
-        account_ids = args[0] if args else task_kwargs.get("account_ids")
+        account_ids = task_kwargs.get("account_ids")
         if account_ids is not None:
             return {"account_ids": account_ids}
         return {}

@@ -1090,70 +1071,35 @@ def _extract_input_data(


 def _determine_trigger_source(
-    task_type: PaperlessTask.TaskType,
-    args: tuple,
-    task_kwargs: dict,
     headers: dict,
 ) -> PaperlessTask.TriggerSource:
     """Resolve the TriggerSource for a task being published to the broker.

-    Priority order:
-    1. Explicit trigger_source header (set by beat schedule or apply_async callers).
-    2. For consume_file tasks, the DocumentSource on the input document.
-    3. MANUAL as the catch-all for all other cases.
+    Reads the trigger_source header set by the caller; falls back to MANUAL
+    when the header is absent or contains an unrecognised value.
     """
-    # Explicit header takes priority -- callers pass a TriggerSource DB value directly.
     header_source = headers.get("trigger_source")
     if header_source is not None:
         try:
             return PaperlessTask.TriggerSource(header_source)
         except ValueError:
             pass

-    if task_type == PaperlessTask.TaskType.CONSUME_FILE:
-        input_doc, _ = _get_consume_args(args, task_kwargs)
-        if input_doc is not None:
-            return _DOCUMENT_SOURCE_TO_TRIGGER.get(
-                input_doc.source,
-                PaperlessTask.TriggerSource.API_UPLOAD,
-            )
-
     return PaperlessTask.TriggerSource.MANUAL


 def _extract_owner_id(
     task_type: PaperlessTask.TaskType,
-    args: tuple,
     task_kwargs: dict,
 ) -> int | None:
     """Return the owner_id from consume_file overrides, or None for all other task types."""
     if task_type != PaperlessTask.TaskType.CONSUME_FILE:
         return None
-    _, overrides = _get_consume_args(args, task_kwargs)
+    overrides = task_kwargs.get("overrides")
     if overrides and hasattr(overrides, "owner_id"):
         return overrides.owner_id
     return None  # pragma: no cover


-def _parse_consume_result(result: str) -> dict | None:
-    """Parse a consume_file string result into a structured dict.
-
-    consume_file returns human-readable strings rather than dicts (e.g.
-    "Success. New document id 42 created" or "It is a duplicate of foo (#7)").
-    This function extracts the document ID or duplicate reference so the
-    result can be stored as structured data on the PaperlessTask record.
-    Returns None when the string does not match any known pattern.
-    """
-    if match := _re.search(r"New document id (\d+) created", result):
-        return {"document_id": int(match.group(1))}
-    if match := _re.search(r"It is a duplicate of .* \(#(\d+)\)", result):
-        return {
-            "duplicate_of": int(match.group(1)),
-            "duplicate_in_trash": "existing document is in the trash" in result,
-        }
-    return None  # pragma: no cover
-
-
 @before_task_publish.connect
 def before_task_publish_handler(
     sender=None,

@@ -1177,17 +1123,12 @@ def before_task_publish_handler(

     try:
         close_old_connections()
-        args, task_kwargs, _ = body
+        _, task_kwargs, _ = body
         task_id = headers["id"]

-        input_data = _extract_input_data(task_type, args, task_kwargs)
-        trigger_source = _determine_trigger_source(
-            task_type,
-            args,
-            task_kwargs,
-            headers,
-        )
-        owner_id = _extract_owner_id(task_type, args, task_kwargs)
+        input_data = _extract_input_data(task_type, task_kwargs)
+        trigger_source = _determine_trigger_source(headers)
+        owner_id = _extract_owner_id(task_type, task_kwargs)

         PaperlessTask.objects.create(
             task_id=task_id,
```
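The receiving half of the header contract: `before_task_publish` runs in the publishing process, so the task row can be created, trigger source included, before any worker picks the message up. A hedged, standalone sketch of a receiver reading a custom header, logging where the real handler writes a `PaperlessTask` row:

```python
# Sketch: reading a custom header at publish time. In this codebase the
# signal's `headers` dict carries both Celery's own keys ("id") and the
# caller-supplied "trigger_source"; the fallback mirrors the handler above.
from celery.signals import before_task_publish


@before_task_publish.connect
def log_trigger_source(sender=None, headers=None, body=None, **kwargs):
    trigger = (headers or {}).get("trigger_source", "manual")
    print(f"publishing {sender}: trigger_source={trigger}")
```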
```diff
@@ -1235,8 +1176,7 @@ def task_postrun_handler(
     Records task completion and result data for non-failure outcomes.

     Skips FAILURE states entirely, since task_failure_handler fires first
-    and fully owns the failure path (status, date_done, duration,
-    result_data, result_message).
+    and fully owns the failure path (status, date_done, duration, result_data).

     https://docs.celeryq.dev/en/stable/userguide/signals.html#task-postrun
     """

@@ -1275,10 +1215,9 @@ def task_postrun_handler(
         if isinstance(retval, dict):
             task_instance.result_data = retval
             changed_fields.append("result_data")
-        elif isinstance(retval, str):
-            task_instance.result_message = retval
-            task_instance.result_data = _parse_consume_result(retval)
-            changed_fields.extend(["result_message", "result_data"])
+            if "duplicate_of" in retval:
+                task_instance.status = PaperlessTask.Status.FAILURE
+                changed_fields.append("status")

         task_instance.save(update_fields=changed_fields)
     except Exception:  # pragma: no cover

@@ -1321,7 +1260,6 @@ def task_failure_handler(
     update_fields: dict = {
         "status": PaperlessTask.Status.FAILURE,
         "result_data": result_data,
-        "result_message": str(exception) if exception else None,
         "date_done": now,
     }

@@ -1399,7 +1337,7 @@ def add_or_update_document_in_llm_index(sender, document, **kwargs):
     if ai_config.llm_index_enabled:
         from documents.tasks import update_document_in_llm_index

-        update_document_in_llm_index.delay(document)
+        update_document_in_llm_index.apply_async(kwargs={"document": document})


 @receiver(models.signals.post_delete, sender=Document)

@@ -1415,4 +1353,4 @@ def delete_document_from_llm_index(
     if ai_config.llm_index_enabled:
         from documents.tasks import remove_document_from_llm_index

-        remove_document_from_llm_index.delay(instance)
+        remove_document_from_llm_index.apply_async(kwargs={"document": instance})

@@ -26,11 +26,15 @@ from documents.caching import clear_document_caches
 from documents.classifier import DocumentClassifier
 from documents.classifier import load_classifier
 from documents.consumer import AsnCheckPlugin
+from documents.consumer import ConsumeFileDuplicateError
 from documents.consumer import ConsumerPlugin
 from documents.consumer import ConsumerPreflightPlugin
 from documents.consumer import WorkflowTriggerPlugin
 from documents.consumer import should_produce_archive
 from documents.data_models import ConsumableDocument
+from documents.data_models import ConsumeFileDuplicateResult
+from documents.data_models import ConsumeFileStoppedResult
+from documents.data_models import ConsumeFileSuccessResult
 from documents.data_models import DocumentMetadataOverrides
 from documents.double_sided import CollatePlugin
 from documents.file_handling import create_source_path_directory

@@ -40,6 +44,7 @@ from documents.models import Correspondent
 from documents.models import CustomFieldInstance
 from documents.models import Document
 from documents.models import DocumentType
+from documents.models import PaperlessTask
 from documents.models import ShareLink
 from documents.models import ShareLinkBundle
 from documents.models import StoragePath

@@ -120,6 +125,11 @@ def consume_file(
     self: Task,
     input_doc: ConsumableDocument,
     overrides: DocumentMetadataOverrides | None = None,
+) -> (
+    ConsumeFileSuccessResult
+    | ConsumeFileStoppedResult
+    | ConsumeFileDuplicateResult
+    | None
+):
     token = consume_task_id.set((self.request.id or "")[:8])
     try:

@@ -152,6 +162,7 @@ def consume_file(
         TemporaryDirectory(dir=settings.SCRATCH_DIR) as tmp_dir,
     ):
         tmp_dir = Path(tmp_dir)
+        msg = None
         for plugin_class in plugins:
             plugin_name = plugin_class.NAME

@@ -182,7 +193,14 @@ def consume_file(

             except StopConsumeTaskError as e:
                 logger.info(f"{plugin_name} requested task exit: {e.message}")
-                return e.message
+                return ConsumeFileStoppedResult(reason=e.message)
+
+            except ConsumeFileDuplicateError as e:
+                logger.info(f"{plugin_name} rejected duplicate: {e}")
+                return ConsumeFileDuplicateResult(
+                    duplicate_of=e.duplicate_id,
+                    duplicate_in_trash=e.in_trash,
+                )

             except Exception as e:
                 logger.exception(f"{plugin_name} failed: {e}")

@@ -600,7 +618,10 @@ def update_document_parent_tags(tag: Tag, new_parent: Tag) -> None:
         )

     if affected:
-        bulk_update_documents.delay(document_ids=list(affected))
+        bulk_update_documents.apply_async(
+            kwargs={"document_ids": list(affected)},
+            headers={"trigger_source": PaperlessTask.TriggerSource.SYSTEM},
+        )


 @shared_task

@@ -78,5 +78,4 @@ class PaperlessTaskFactory(DjangoModelFactory):
     status = PaperlessTask.Status.PENDING
     input_data = factory.LazyFunction(dict)
     result_data = None
-    result_message = None
     acknowledged = False

@@ -563,6 +563,26 @@ class TestFieldHandling:
 class TestHighlightHits:
     """Test highlight_hits returns proper HTML strings, not raw Snippet objects."""

+    def test_highlights_simple_text_mode_returns_html_string(
+        self,
+        backend: TantivyBackend,
+    ):
+        """Simple text search should still produce content highlights for exact-token hits."""
+        doc = Document.objects.create(
+            title="Highlight Test",
+            content="The quick brown fox jumps over the lazy dog",
+            checksum="HH0",
+            pk=89,
+        )
+        backend.add_or_update(doc)
+
+        hits = backend.highlight_hits("quick", [doc.pk], search_mode=SearchMode.TEXT)
+
+        assert len(hits) == 1
+        highlights = hits[0]["highlights"]
+        assert "content" in highlights
+        assert "<b>" in highlights["content"]
+
     def test_highlights_content_returns_html_string(self, backend: TantivyBackend):
         """highlight_hits must return HTML strings (from Snippet.to_html()), not Snippet objects."""
         doc = Document.objects.create(

@@ -1,5 +1,6 @@
 from __future__ import annotations

+import json
 from typing import TYPE_CHECKING

 import pytest

@@ -18,7 +19,7 @@ pytestmark = pytest.mark.search
 class TestNeedsRebuild:
     """needs_rebuild covers all sentinel-file states that require a full reindex."""

-    def test_returns_true_when_version_file_missing(self, index_dir: Path) -> None:
+    def test_returns_true_when_settings_file_missing(self, index_dir: Path) -> None:
         assert needs_rebuild(index_dir) is True

     def test_returns_false_when_version_and_language_match(

@@ -27,37 +28,51 @@ class TestNeedsRebuild:
         settings: SettingsWrapper,
     ) -> None:
         settings.SEARCH_LANGUAGE = "en"
-        (index_dir / ".schema_version").write_text(str(SCHEMA_VERSION))
-        (index_dir / ".schema_language").write_text("en")
+        (index_dir / ".index_settings.json").write_text(
+            json.dumps({"schema_version": SCHEMA_VERSION, "language": "en"}),
+        )
         assert needs_rebuild(index_dir) is False

-    def test_returns_true_on_schema_version_mismatch(self, index_dir: Path) -> None:
-        (index_dir / ".schema_version").write_text(str(SCHEMA_VERSION - 1))
-        assert needs_rebuild(index_dir) is True
-
-    def test_returns_true_when_version_file_not_an_integer(
+    def test_returns_true_on_schema_version_mismatch(
         self,
         index_dir: Path,
+        settings: SettingsWrapper,
     ) -> None:
-        (index_dir / ".schema_version").write_text("not-a-number")
+        settings.SEARCH_LANGUAGE = None
+        (index_dir / ".index_settings.json").write_text(
+            json.dumps({"schema_version": SCHEMA_VERSION - 1, "language": None}),
+        )
         assert needs_rebuild(index_dir) is True

-    def test_returns_true_when_language_sentinel_missing(
+    def test_returns_true_when_version_is_not_an_integer(
         self,
         index_dir: Path,
         settings: SettingsWrapper,
     ) -> None:
+        settings.SEARCH_LANGUAGE = None
+        (index_dir / ".index_settings.json").write_text(
+            json.dumps({"schema_version": "not-a-number", "language": None}),
+        )
+        assert needs_rebuild(index_dir) is True
+
+    def test_returns_true_when_language_key_missing(
+        self,
+        index_dir: Path,
+        settings: SettingsWrapper,
+    ) -> None:
         settings.SEARCH_LANGUAGE = "en"
-        (index_dir / ".schema_version").write_text(str(SCHEMA_VERSION))
-        # .schema_language intentionally absent
+        (index_dir / ".index_settings.json").write_text(
+            json.dumps({"schema_version": SCHEMA_VERSION}),
+        )
         assert needs_rebuild(index_dir) is True

-    def test_returns_true_when_language_sentinel_content_differs(
+    def test_returns_true_when_language_differs(
         self,
         index_dir: Path,
         settings: SettingsWrapper,
     ) -> None:
         settings.SEARCH_LANGUAGE = "de"
-        (index_dir / ".schema_version").write_text(str(SCHEMA_VERSION))
-        (index_dir / ".schema_language").write_text("en")
+        (index_dir / ".index_settings.json").write_text(
+            json.dumps({"schema_version": SCHEMA_VERSION, "language": "en"}),
+        )
         assert needs_rebuild(index_dir) is True

@@ -26,7 +26,7 @@ class TestBulkEditAPI(DirectoriesMixin, APITestCase):
         self.user = user
         self.client.force_authenticate(user=user)

-        patcher = mock.patch("documents.bulk_edit.bulk_update_documents.delay")
+        patcher = mock.patch("documents.bulk_edit.bulk_update_documents.apply_async")
         self.async_task = patcher.start()
         self.addCleanup(patcher.stop)
         self.c1 = Correspondent.objects.create(name="c1")

@@ -62,7 +62,7 @@ class TestBulkEditAPI(DirectoriesMixin, APITestCase):
         m.return_value = return_value
         m.__name__ = method_name

-    @mock.patch("documents.bulk_edit.bulk_update_documents.delay")
+    @mock.patch("documents.bulk_edit.bulk_update_documents.apply_async")
     def test_api_set_correspondent(self, bulk_update_task_mock) -> None:
         self.assertNotEqual(self.doc1.correspondent, self.c1)
         response = self.client.post(

@@ -79,9 +79,13 @@ class TestBulkEditAPI(DirectoriesMixin, APITestCase):
         self.assertEqual(response.status_code, status.HTTP_200_OK)
         self.doc1.refresh_from_db()
         self.assertEqual(self.doc1.correspondent, self.c1)
-        bulk_update_task_mock.assert_called_once_with(document_ids=[self.doc1.pk])
+        bulk_update_task_mock.assert_called_once()
+        self.assertCountEqual(
+            bulk_update_task_mock.call_args.kwargs["kwargs"]["document_ids"],
+            [self.doc1.pk],
+        )

-    @mock.patch("documents.bulk_edit.bulk_update_documents.delay")
+    @mock.patch("documents.bulk_edit.bulk_update_documents.apply_async")
     def test_api_unset_correspondent(self, bulk_update_task_mock) -> None:
         self.doc1.correspondent = self.c1
         self.doc1.save()
```
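The nested `call_args.kwargs["kwargs"]` lookup follows from the new call shape: the outer level holds `apply_async`'s own keyword arguments, one of which is the task's `kwargs` dict. A quick illustration with a bare `Mock`:

```python
# Why the tests read call_args.kwargs["kwargs"]: the outer kwargs belong to
# apply_async itself, the inner "kwargs" entry holds the task's arguments.
from unittest import mock

m = mock.Mock()
m(kwargs={"document_ids": [1, 2]}, headers={"trigger_source": "system"})

assert m.call_args.kwargs["kwargs"]["document_ids"] == [1, 2]
assert m.call_args.kwargs["headers"]["trigger_source"] == "system"
```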
@@ -103,7 +107,7 @@ class TestBulkEditAPI(DirectoriesMixin, APITestCase):
|
||||
self.doc1.refresh_from_db()
|
||||
self.assertIsNone(self.doc1.correspondent)
|
||||
|
||||
@mock.patch("documents.bulk_edit.bulk_update_documents.delay")
|
||||
@mock.patch("documents.bulk_edit.bulk_update_documents.apply_async")
|
||||
def test_api_set_type(self, bulk_update_task_mock) -> None:
|
||||
self.assertNotEqual(self.doc1.document_type, self.dt1)
|
||||
response = self.client.post(
|
||||
@@ -120,9 +124,13 @@ class TestBulkEditAPI(DirectoriesMixin, APITestCase):
|
||||
self.assertEqual(response.status_code, status.HTTP_200_OK)
|
||||
self.doc1.refresh_from_db()
|
||||
self.assertEqual(self.doc1.document_type, self.dt1)
|
||||
bulk_update_task_mock.assert_called_once_with(document_ids=[self.doc1.pk])
|
||||
bulk_update_task_mock.assert_called_once()
|
||||
self.assertCountEqual(
|
||||
bulk_update_task_mock.call_args.kwargs["kwargs"]["document_ids"],
|
||||
[self.doc1.pk],
|
||||
)
|
||||
|
||||
@mock.patch("documents.bulk_edit.bulk_update_documents.delay")
|
||||
@mock.patch("documents.bulk_edit.bulk_update_documents.apply_async")
|
||||
def test_api_unset_type(self, bulk_update_task_mock) -> None:
|
||||
self.doc1.document_type = self.dt1
|
||||
self.doc1.save()
|
||||
@@ -141,9 +149,13 @@ class TestBulkEditAPI(DirectoriesMixin, APITestCase):
|
||||
self.assertEqual(response.status_code, status.HTTP_200_OK)
|
||||
self.doc1.refresh_from_db()
|
||||
self.assertIsNone(self.doc1.document_type)
|
||||
bulk_update_task_mock.assert_called_once_with(document_ids=[self.doc1.pk])
|
||||
bulk_update_task_mock.assert_called_once()
|
||||
self.assertCountEqual(
|
||||
bulk_update_task_mock.call_args.kwargs["kwargs"]["document_ids"],
|
||||
[self.doc1.pk],
|
||||
)
|
||||
|
||||
@mock.patch("documents.bulk_edit.bulk_update_documents.delay")
|
||||
@mock.patch("documents.bulk_edit.bulk_update_documents.apply_async")
|
||||
def test_api_add_tag(self, bulk_update_task_mock) -> None:
|
||||
self.assertFalse(self.doc1.tags.filter(pk=self.t1.pk).exists())
|
||||
|
||||
@@ -163,9 +175,13 @@ class TestBulkEditAPI(DirectoriesMixin, APITestCase):
|
||||
|
||||
self.assertTrue(self.doc1.tags.filter(pk=self.t1.pk).exists())
|
||||
|
||||
bulk_update_task_mock.assert_called_once_with(document_ids=[self.doc1.pk])
|
||||
bulk_update_task_mock.assert_called_once()
|
||||
self.assertCountEqual(
|
||||
bulk_update_task_mock.call_args.kwargs["kwargs"]["document_ids"],
|
||||
[self.doc1.pk],
|
||||
)
|
||||
|
||||
@mock.patch("documents.bulk_edit.bulk_update_documents.delay")
|
||||
@mock.patch("documents.bulk_edit.bulk_update_documents.apply_async")
|
||||
def test_api_remove_tag(self, bulk_update_task_mock) -> None:
|
||||
self.doc1.tags.add(self.t1)
|
||||
|
||||
|
||||
@@ -278,7 +278,7 @@ class TestCustomFieldsAPI(DirectoriesMixin, APITestCase):
|
||||
doc.refresh_from_db()
|
||||
self.assertEqual(doc.custom_fields.first().value, None)
|
||||
|
||||
@mock.patch("documents.signals.handlers.process_cf_select_update.delay")
|
||||
@mock.patch("documents.signals.handlers.process_cf_select_update.apply_async")
|
||||
def test_custom_field_update_offloaded_once(self, mock_delay) -> None:
|
||||
"""
|
||||
GIVEN:
|
||||
@@ -322,7 +322,7 @@ class TestCustomFieldsAPI(DirectoriesMixin, APITestCase):
|
||||
}
|
||||
cf_select.save()
|
||||
|
||||
mock_delay.assert_called_once_with(cf_select)
|
||||
mock_delay.assert_called_once_with(kwargs={"custom_field": cf_select})
|
||||
|
||||
def test_create_custom_field_monetary_validation(self) -> None:
|
||||
"""
|
||||
|
||||
@@ -537,7 +537,7 @@ class TestDocumentVersioningApi(DirectoriesMixin, APITestCase):
async_task.id = "task-123"

with mock.patch("documents.views.consume_file") as consume_mock:
consume_mock.delay.return_value = async_task
consume_mock.apply_async.return_value = async_task
resp = self.client.post(
f"/api/documents/{root.id}/update_version/",
{"document": upload, "version_label": " New Version "},
@@ -546,8 +546,9 @@ class TestDocumentVersioningApi(DirectoriesMixin, APITestCase):

self.assertEqual(resp.status_code, status.HTTP_200_OK)
self.assertEqual(resp.data, "task-123")
consume_mock.delay.assert_called_once()
input_doc, overrides = consume_mock.delay.call_args[0]
consume_mock.apply_async.assert_called_once()
task_kwargs = consume_mock.apply_async.call_args.kwargs["kwargs"]
input_doc, overrides = task_kwargs["input_doc"], task_kwargs["overrides"]
self.assertEqual(input_doc.root_document_id, root.id)
self.assertEqual(input_doc.source, DocumentSource.ApiUpload)
self.assertEqual(overrides.version_label, "New Version")
@@ -571,7 +572,7 @@ class TestDocumentVersioningApi(DirectoriesMixin, APITestCase):
async_task.id = "task-123"

with mock.patch("documents.views.consume_file") as consume_mock:
consume_mock.delay.return_value = async_task
consume_mock.apply_async.return_value = async_task
resp = self.client.post(
f"/api/documents/{version.id}/update_version/",
{"document": upload, "version_label": " New Version "},
@@ -580,8 +581,9 @@ class TestDocumentVersioningApi(DirectoriesMixin, APITestCase):

self.assertEqual(resp.status_code, status.HTTP_200_OK)
self.assertEqual(resp.data, "task-123")
consume_mock.delay.assert_called_once()
input_doc, overrides = consume_mock.delay.call_args[0]
consume_mock.apply_async.assert_called_once()
task_kwargs = consume_mock.apply_async.call_args.kwargs["kwargs"]
input_doc, overrides = task_kwargs["input_doc"], task_kwargs["overrides"]
self.assertEqual(input_doc.root_document_id, root.id)
self.assertEqual(overrides.version_label, "New Version")
self.assertEqual(overrides.actor_id, self.user.id)
@@ -595,7 +597,7 @@ class TestDocumentVersioningApi(DirectoriesMixin, APITestCase):
upload = self._make_pdf_upload()

with mock.patch("documents.views.consume_file") as consume_mock:
consume_mock.delay.side_effect = Exception("boom")
consume_mock.apply_async.side_effect = Exception("boom")
resp = self.client.post(
f"/api/documents/{root.id}/update_version/",
{"document": upload},
@@ -47,11 +47,11 @@ from documents.models import Workflow
from documents.models import WorkflowAction
from documents.models import WorkflowTrigger
from documents.signals.handlers import run_workflows
from documents.tests.utils import ConsumeTaskMixin
from documents.tests.utils import DirectoriesMixin
from documents.tests.utils import DocumentConsumeDelayMixin


class TestDocumentApi(DirectoriesMixin, DocumentConsumeDelayMixin, APITestCase):
class TestDocumentApi(DirectoriesMixin, ConsumeTaskMixin, APITestCase):
def setUp(self) -> None:
super().setUp()

@@ -1400,9 +1400,7 @@ class TestDocumentApi(DirectoriesMixin, DocumentConsumeDelayMixin, APITestCase):

self.assertEqual(response.status_code, status.HTTP_200_OK)

self.consume_file_mock.assert_called_once()

input_doc, overrides = self.get_last_consume_delay_call_args()
input_doc, overrides = self.assert_queue_consumption_task_call_args()

self.assertEqual(input_doc.original_file.name, "simple.pdf")
self.assertTrue(
@@ -1432,9 +1430,7 @@ class TestDocumentApi(DirectoriesMixin, DocumentConsumeDelayMixin, APITestCase):
)

self.assertEqual(response.status_code, status.HTTP_200_OK)
self.consume_file_mock.assert_called_once()

input_doc, overrides = self.get_last_consume_delay_call_args()
input_doc, overrides = self.assert_queue_consumption_task_call_args()

self.assertEqual(input_doc.original_file.name, "outside.pdf")
self.assertEqual(overrides.filename, "outside.pdf")
@@ -1474,9 +1470,7 @@ class TestDocumentApi(DirectoriesMixin, DocumentConsumeDelayMixin, APITestCase):
)

self.assertEqual(response.status_code, status.HTTP_200_OK)
self.consume_file_mock.assert_called_once()

input_doc, overrides = self.get_last_consume_delay_call_args()
input_doc, overrides = self.assert_queue_consumption_task_call_args()

self.assertEqual(input_doc.original_file.name, "outside.pdf")
self.assertEqual(overrides.filename, "outside.pdf")
@@ -1558,9 +1552,7 @@ class TestDocumentApi(DirectoriesMixin, DocumentConsumeDelayMixin, APITestCase):

self.assertEqual(response.status_code, status.HTTP_200_OK)

self.consume_file_mock.assert_called_once()

input_doc, overrides = self.get_last_consume_delay_call_args()
input_doc, overrides = self.assert_queue_consumption_task_call_args()

self.assertEqual(input_doc.original_file.name, "simple.pdf")
self.assertTrue(
@@ -1612,9 +1604,7 @@ class TestDocumentApi(DirectoriesMixin, DocumentConsumeDelayMixin, APITestCase):
)
self.assertEqual(response.status_code, status.HTTP_200_OK)

self.consume_file_mock.assert_called_once()

_, overrides = self.get_last_consume_delay_call_args()
_, overrides = self.assert_queue_consumption_task_call_args()

self.assertEqual(overrides.title, "my custom title")
self.assertIsNone(overrides.correspondent_id)
@@ -1634,9 +1624,7 @@ class TestDocumentApi(DirectoriesMixin, DocumentConsumeDelayMixin, APITestCase):
)
self.assertEqual(response.status_code, status.HTTP_200_OK)

self.consume_file_mock.assert_called_once()

_, overrides = self.get_last_consume_delay_call_args()
_, overrides = self.assert_queue_consumption_task_call_args()

self.assertEqual(overrides.correspondent_id, c.id)
self.assertIsNone(overrides.title)
@@ -1670,9 +1658,7 @@ class TestDocumentApi(DirectoriesMixin, DocumentConsumeDelayMixin, APITestCase):
)
self.assertEqual(response.status_code, status.HTTP_200_OK)

self.consume_file_mock.assert_called_once()

_, overrides = self.get_last_consume_delay_call_args()
_, overrides = self.assert_queue_consumption_task_call_args()

self.assertEqual(overrides.document_type_id, dt.id)
self.assertIsNone(overrides.correspondent_id)
@@ -1706,9 +1692,7 @@ class TestDocumentApi(DirectoriesMixin, DocumentConsumeDelayMixin, APITestCase):
)
self.assertEqual(response.status_code, status.HTTP_200_OK)

self.consume_file_mock.assert_called_once()

_, overrides = self.get_last_consume_delay_call_args()
_, overrides = self.assert_queue_consumption_task_call_args()

self.assertEqual(overrides.storage_path_id, sp.id)
self.assertIsNone(overrides.correspondent_id)
@@ -1743,9 +1727,7 @@ class TestDocumentApi(DirectoriesMixin, DocumentConsumeDelayMixin, APITestCase):
)
self.assertEqual(response.status_code, status.HTTP_200_OK)

self.consume_file_mock.assert_called_once()

_, overrides = self.get_last_consume_delay_call_args()
_, overrides = self.assert_queue_consumption_task_call_args()

self.assertCountEqual(overrides.tag_ids, [t1.id, t2.id])
self.assertIsNone(overrides.document_type_id)
@@ -1790,9 +1772,7 @@ class TestDocumentApi(DirectoriesMixin, DocumentConsumeDelayMixin, APITestCase):
)
self.assertEqual(response.status_code, status.HTTP_200_OK)

self.consume_file_mock.assert_called_once()

_, overrides = self.get_last_consume_delay_call_args()
_, overrides = self.assert_queue_consumption_task_call_args()

self.assertEqual(overrides.created, created.date())

@@ -1809,9 +1789,7 @@ class TestDocumentApi(DirectoriesMixin, DocumentConsumeDelayMixin, APITestCase):

self.assertEqual(response.status_code, status.HTTP_200_OK)

self.consume_file_mock.assert_called_once()

input_doc, overrides = self.get_last_consume_delay_call_args()
input_doc, overrides = self.assert_queue_consumption_task_call_args()

self.assertEqual(input_doc.original_file.name, "simple.pdf")
self.assertEqual(overrides.filename, "simple.pdf")
@@ -1841,9 +1819,7 @@ class TestDocumentApi(DirectoriesMixin, DocumentConsumeDelayMixin, APITestCase):

self.assertEqual(response.status_code, status.HTTP_200_OK)

self.consume_file_mock.assert_called_once()

input_doc, overrides = self.get_last_consume_delay_call_args()
input_doc, overrides = self.assert_queue_consumption_task_call_args()

self.assertEqual(input_doc.original_file.name, "simple.pdf")
self.assertEqual(overrides.filename, "simple.pdf")
@@ -1898,9 +1874,7 @@ class TestDocumentApi(DirectoriesMixin, DocumentConsumeDelayMixin, APITestCase):

self.assertEqual(response.status_code, status.HTTP_200_OK)

self.consume_file_mock.assert_called_once()

input_doc, overrides = self.get_last_consume_delay_call_args()
input_doc, overrides = self.assert_queue_consumption_task_call_args()

new_overrides, _ = run_workflows(
trigger_type=WorkflowTrigger.WorkflowTriggerType.CONSUMPTION,
@@ -1946,9 +1920,7 @@ class TestDocumentApi(DirectoriesMixin, DocumentConsumeDelayMixin, APITestCase):

self.assertEqual(response.status_code, status.HTTP_200_OK)

self.consume_file_mock.assert_called_once()

input_doc, overrides = self.get_last_consume_delay_call_args()
input_doc, overrides = self.assert_queue_consumption_task_call_args()

self.assertEqual(input_doc.original_file.name, "simple.pdf")
self.assertEqual(overrides.filename, "simple.pdf")
@@ -2047,9 +2019,7 @@ class TestDocumentApi(DirectoriesMixin, DocumentConsumeDelayMixin, APITestCase):

self.assertEqual(response.status_code, status.HTTP_200_OK)

self.consume_file_mock.assert_called_once()

input_doc, _ = self.get_last_consume_delay_call_args()
input_doc, _ = self.assert_queue_consumption_task_call_args()

self.assertEqual(input_doc.source, WorkflowTrigger.DocumentSourceChoices.WEB_UI)
@@ -9,8 +9,6 @@ from rest_framework.test import APITestCase

from documents.models import CustomField
from documents.models import Document
from documents.models import SavedView
from documents.models import SavedViewFilterRule
from documents.serialisers import DocumentSerializer
from documents.tests.utils import DirectoriesMixin

@@ -455,111 +453,6 @@ class TestCustomFieldsSearch(DirectoriesMixin, APITestCase):
),
)

def test_exact_monetary(self) -> None:
# "exact" should match by numeric amount, ignoring currency code prefix.
self._assert_query_match_predicate(
["monetary_field", "exact", "100"],
lambda document: (
"monetary_field" in document
and document["monetary_field"] == "USD100.00"
),
)
self._assert_query_match_predicate(
["monetary_field", "exact", "101"],
lambda document: (
"monetary_field" in document and document["monetary_field"] == "101.00"
),
)

def test_in_monetary(self) -> None:
# "in" should match by numeric amount, ignoring currency code prefix.
self._assert_query_match_predicate(
["monetary_field", "in", ["100", "50"]],
lambda document: (
"monetary_field" in document
and document["monetary_field"] in {"USD100.00", "EUR50.00"}
),
)

def test_exact_monetary_with_currency_prefix(self) -> None:
# Providing a currency-prefixed string like "USD100.00" for an exact monetary
# filter should work for backwards compatibility with saved views. The currency
# code is stripped and the numeric amount is used for comparison.
self._assert_query_match_predicate(
["monetary_field", "exact", "USD100.00"],
lambda document: (
"monetary_field" in document
and document["monetary_field"] == "USD100.00"
),
)
self._assert_query_match_predicate(
["monetary_field", "in", ["USD100.00", "EUR50.00"]],
lambda document: (
"monetary_field" in document
and document["monetary_field"] in {"USD100.00", "EUR50.00"}
),
)
self._assert_query_match_predicate(
["monetary_field", "gt", "USD99.00"],
lambda document: (
"monetary_field" in document
and document["monetary_field"] is not None
and (
document["monetary_field"] == "USD100.00"
or document["monetary_field"] == "101.00"
)
),
)

def test_saved_view_with_currency_prefixed_monetary_filter(self) -> None:
"""
A saved view created before the exact-monetary fix stored currency-prefixed
values like '["monetary_field", "exact", "USD100.00"]' as the filter rule value
(rule_type=42). Those saved views must continue to return correct results.
"""
saved_view = SavedView.objects.create(name="test view", owner=self.user)
SavedViewFilterRule.objects.create(
saved_view=saved_view,
rule_type=42, # FILTER_CUSTOM_FIELDS_QUERY
value=json.dumps(["monetary_field", "exact", "USD100.00"]),
)
# The frontend translates rule_type=42 to the custom_field_query URL param;
# simulate that here using the stored filter rule value directly.
rule = saved_view.filter_rules.get(rule_type=42)
query_string = quote(rule.value, safe="")
response = self.client.get(
"/api/documents/?"
+ "&".join(
(
f"custom_field_query={query_string}",
"ordering=archive_serial_number",
"page=1",
f"page_size={len(self.documents)}",
"truncate_content=true",
),
),
)
self.assertEqual(response.status_code, 200, msg=str(response.json()))
result_ids = {doc["id"] for doc in response.json()["results"]}
# Should match the single document with monetary_field = "USD100.00"
expected_ids = {
doc.id
for doc in self.documents
if doc.custom_fields.filter(
field__name="monetary_field",
value_monetary="USD100.00",
).exists()
}
self.assertEqual(result_ids, expected_ids)

def test_monetary_amount_with_invalid_value(self) -> None:
# A value that has a currency prefix but no valid number after it should fail.
self._assert_validation_error(
json.dumps(["monetary_field", "exact", "USDnotanumber"]),
["custom_field_query", "2"],
"valid number",
)

# ==========================================================#
# Subset check (document link field only) #
# ==========================================================#
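The monetary tests above imply a normalization step that strips an optional three-letter currency code before comparing amounts, and rejects a prefix without a valid number. A hedged sketch of that behavior (the helper name and regex are assumptions, not the project's actual implementation):

    import re
    from decimal import Decimal

    def monetary_amount(value: str) -> Decimal:
        # Accept "100", "101.00", or currency-prefixed "USD100.00";
        # reject values like "USDnotanumber".
        match = re.fullmatch(r"(?:[A-Z]{3})?(-?\d+(?:\.\d+)?)", value)
        if match is None:
            raise ValueError(f"{value!r} does not contain a valid number")
        return Decimal(match.group(1))

    assert monetary_amount("USD100.00") == Decimal("100")
    assert monetary_amount("100") == monetary_amount("EUR100")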
@@ -291,7 +291,7 @@ class TestApiStoragePaths(DirectoriesMixin, APITestCase):
self.assertEqual(response.status_code, status.HTTP_201_CREATED)
self.assertEqual(StoragePath.objects.count(), 2)

@mock.patch("documents.bulk_edit.bulk_update_documents.delay")
@mock.patch("documents.bulk_edit.bulk_update_documents.apply_async")
def test_api_update_storage_path(self, bulk_update_mock) -> None:
"""
GIVEN:
@@ -316,11 +316,12 @@ class TestApiStoragePaths(DirectoriesMixin, APITestCase):

bulk_update_mock.assert_called_once()

args, _ = bulk_update_mock.call_args
self.assertCountEqual(
[document.pk],
bulk_update_mock.call_args.kwargs["kwargs"]["document_ids"],
)

self.assertCountEqual([document.pk], args[0])

@mock.patch("documents.bulk_edit.bulk_update_documents.delay")
@mock.patch("documents.bulk_edit.bulk_update_documents.apply_async")
def test_api_delete_storage_path(self, bulk_update_mock) -> None:
"""
GIVEN:
@@ -347,7 +348,11 @@ class TestApiStoragePaths(DirectoriesMixin, APITestCase):
self.assertEqual(response.status_code, status.HTTP_204_NO_CONTENT)

# only called once
bulk_update_mock.assert_called_once_with([document.pk])
bulk_update_mock.assert_called_once()
self.assertCountEqual(
[document.pk],
bulk_update_mock.call_args.kwargs["kwargs"]["document_ids"],
)

def test_test_storage_path(self) -> None:
"""
@@ -6,6 +6,12 @@ from rest_framework import status
from rest_framework.test import APITestCase


@pytest.fixture(scope="session")
def api_schema():
generator = SchemaGenerator()
return generator.get_schema(request=None, public=True)


class TestApiSchema(APITestCase):
ENDPOINT = "/api/schema/"

@@ -70,26 +76,17 @@ class TestApiSchema:
self.assertIn(action_method, advertised_methods)


# ---- session-scoped fixture: generate schema once for all TestXxx classes ----


@pytest.fixture(scope="session")
def api_schema():
generator = SchemaGenerator()
return generator.get_schema(request=None, public=True)


class TestTasksSummarySchema:
"""tasks_summary_retrieve: response must be an array of TaskSummarySerializer."""

def test_summary_response_is_array(self, api_schema):
def test_summary_response_is_array(self, api_schema: SchemaGenerator):
op = api_schema["paths"]["/api/tasks/summary/"]["get"]
resp_200 = op["responses"]["200"]["content"]["application/json"]["schema"]
assert resp_200["type"] == "array", (
"tasks_summary_retrieve response must be type:array"
)

def test_summary_items_have_total_count(self, api_schema):
def test_summary_items_have_total_count(self, api_schema: SchemaGenerator):
op = api_schema["paths"]["/api/tasks/summary/"]["get"]
resp_200 = op["responses"]["200"]["content"]["application/json"]["schema"]
items = resp_200.get("items", {})
@@ -107,14 +104,14 @@ class TestTasksSummarySchema:
class TestTasksActiveSchema:
"""tasks_active_retrieve: response must be an array of TaskSerializerV10."""

def test_active_response_is_array(self, api_schema):
def test_active_response_is_array(self, api_schema: SchemaGenerator):
op = api_schema["paths"]["/api/tasks/active/"]["get"]
resp_200 = op["responses"]["200"]["content"]["application/json"]["schema"]
assert resp_200["type"] == "array", (
"tasks_active_retrieve response must be type:array"
)

def test_active_items_ref_named_schema(self, api_schema):
def test_active_items_ref_named_schema(self, api_schema: SchemaGenerator):
op = api_schema["paths"]["/api/tasks/active/"]["get"]
resp_200 = op["responses"]["200"]["content"]["application/json"]["schema"]
items = resp_200.get("items", {})
@@ -122,3 +119,129 @@ class TestTasksActiveSchema:
component_name = ref.split("/")[-1] if ref else ""
assert component_name, "items should be a $ref to a named schema"
assert component_name in api_schema["components"]["schemas"]


class TestMetadataSchema:
"""Metadata component: array fields and optional archive fields."""

@pytest.mark.parametrize("field", ["original_metadata", "archive_metadata"])
def test_metadata_field_is_array(self, api_schema: SchemaGenerator, field: str):
props = api_schema["components"]["schemas"]["Metadata"]["properties"]
assert props[field]["type"] == "array", (
f"{field} should be type:array, not type:object"
)

@pytest.mark.parametrize("field", ["original_metadata", "archive_metadata"])
def test_metadata_items_have_key_field(
self,
api_schema: SchemaGenerator,
field: str,
):
props = api_schema["components"]["schemas"]["Metadata"]["properties"]
items = props[field]["items"]
ref = items.get("$ref", "")
component_name = ref.split("/")[-1] if ref else ""
if component_name:
item_props = api_schema["components"]["schemas"][component_name][
"properties"
]
else:
item_props = items.get("properties", {})
assert "key" in item_props

@pytest.mark.parametrize(
"field",
[
"archive_checksum",
"archive_media_filename",
"archive_size",
"archive_metadata",
],
)
def test_archive_field_not_required(self, api_schema, field):
schema = api_schema["components"]["schemas"]["Metadata"]
required = schema.get("required", [])
assert field not in required
props = schema["properties"]
assert props[field].get("nullable") is True, (
f"{field} should be nullable (allow_null=True)"
)


class TestStoragePathTestSchema:
"""storage_paths_test_create: response must be a string, not a StoragePath object."""

def test_test_action_response_is_string(self, api_schema: SchemaGenerator):
op = api_schema["paths"]["/api/storage_paths/test/"]["post"]
resp_200 = op["responses"]["200"]["content"]["application/json"]["schema"]
assert resp_200.get("type") == "string", (
"storage_paths_test_create 200 response must be type:string"
)

def test_test_action_request_uses_storage_path_test_serializer(
self,
api_schema: SchemaGenerator,
):
op = api_schema["paths"]["/api/storage_paths/test/"]["post"]
content = (
op.get("requestBody", {}).get("content", {}).get("application/json", {})
)
schema_ref = content.get("schema", {}).get("$ref", "")
component_name = schema_ref.split("/")[-1]
# COMPONENT_SPLIT_REQUEST=True causes drf-spectacular to append "Request"
# to request body component names, so StoragePathTestSerializer -> StoragePathTestRequest
assert component_name == "StoragePathTestRequest", (
f"Request body should reference StoragePathTestRequest, got {component_name!r}"
)


class TestProcessedMailBulkDeleteSchema:
"""processed_mail_bulk_delete_create: response must be {result, deleted_mail_ids}."""

def _get_props(self, api_schema: SchemaGenerator):
op = api_schema["paths"]["/api/processed_mail/bulk_delete/"]["post"]
resp_200 = op["responses"]["200"]["content"]["application/json"]["schema"]
ref = resp_200.get("$ref", "")
component_name = ref.split("/")[-1] if ref else ""
if component_name:
return api_schema["components"]["schemas"][component_name]["properties"]
return resp_200.get("properties", {})

@pytest.mark.parametrize("field", ["result", "deleted_mail_ids"])
def test_bulk_delete_response_has_field(
self,
api_schema: SchemaGenerator,
field: str,
):
props = self._get_props(api_schema)
assert field in props, f"bulk_delete 200 response must have a '{field}' field"

def test_bulk_delete_response_is_not_processed_mail_serializer(self, api_schema):
op = api_schema["paths"]["/api/processed_mail/bulk_delete/"]["post"]
resp_200 = op["responses"]["200"]["content"]["application/json"]["schema"]
ref = resp_200.get("$ref", "")
component_name = ref.split("/")[-1] if ref else ""
assert component_name != "ProcessedMail", (
"bulk_delete 200 response must not be the full ProcessedMail serializer"
)


class TestShareLinkBundleRebuildSchema:
"""share_link_bundles_rebuild_create: 200 returns bundle data; 400 is documented."""

def test_rebuild_has_400_response(self, api_schema: SchemaGenerator):
op = api_schema["paths"]["/api/share_link_bundles/{id}/rebuild/"]["post"]
assert "400" in op["responses"], (
"rebuild must document the 400 response for 'Bundle is already being processed.'"
)

def test_rebuild_400_has_detail_field(self, api_schema: SchemaGenerator):
op = api_schema["paths"]["/api/share_link_bundles/{id}/rebuild/"]["post"]
resp_400 = op["responses"]["400"]["content"]["application/json"]["schema"]
ref = resp_400.get("$ref", "")
component_name = ref.split("/")[-1] if ref else ""
if component_name:
props = api_schema["components"]["schemas"][component_name]["properties"]
else:
props = resp_400.get("properties", {})
assert "detail" in props, "rebuild 400 response must have a 'detail' field"
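Several assertions above repeat the same $ref-chasing step: a response schema is either inline or a $ref into components/schemas. A small helper capturing that pattern (hypothetical, not part of the test file):

    def resolve_properties(schema: dict, root: dict) -> dict:
        # Follow "#/components/schemas/<Name>" when present,
        # otherwise read the inline "properties" dict.
        ref = schema.get("$ref", "")
        if ref:
            component_name = ref.split("/")[-1]
            return root["components"]["schemas"][component_name]["properties"]
        return schema.get("properties", {})

    root = {"components": {"schemas": {"Err": {"properties": {"detail": {}}}}}}
    assert "detail" in resolve_properties({"$ref": "#/components/schemas/Err"}, root)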
@@ -299,7 +299,7 @@ class TestSystemStatus(APITestCase):
task_type=PaperlessTask.TaskType.TRAIN_CLASSIFIER,
trigger_source=PaperlessTask.TriggerSource.SCHEDULED,
status=PaperlessTask.Status.FAILURE,
result_message="Classifier training failed",
result_data={"error_message": "Classifier training failed"},
)
self.client.force_login(self.user)
response = self.client.get(self.ENDPOINT)
@@ -360,7 +360,7 @@ class TestSystemStatus(APITestCase):
task_type=PaperlessTask.TaskType.SANITY_CHECK,
trigger_source=PaperlessTask.TriggerSource.SCHEDULED,
status=PaperlessTask.Status.FAILURE,
result_message="5 issues found.",
result_data={"error_message": "5 issues found."},
)
self.client.force_login(self.user)
response = self.client.get(self.ENDPOINT)
@@ -429,7 +429,7 @@ class TestSystemStatus(APITestCase):
task_type=PaperlessTask.TaskType.LLM_INDEX,
trigger_source=PaperlessTask.TriggerSource.SCHEDULED,
status=PaperlessTask.Status.FAILURE,
result_message="AI index update failed",
result_data={"error_message": "AI index update failed"},
)
self.client.force_login(self.user)
response = self.client.get(self.ENDPOINT)
@@ -17,8 +17,8 @@ from documents.data_models import DocumentSource
from documents.models import Document
from documents.models import Tag
from documents.plugins.base import StopConsumeTaskError
from documents.tests.utils import ConsumeTaskMixin
from documents.tests.utils import DirectoriesMixin
from documents.tests.utils import DocumentConsumeDelayMixin
from documents.tests.utils import DummyProgressManager
from documents.tests.utils import FileSystemAssertsMixin
from documents.tests.utils import SampleDirMixin
@@ -601,7 +601,7 @@ class TestBarcodeNewConsume(
DirectoriesMixin,
FileSystemAssertsMixin,
SampleDirMixin,
DocumentConsumeDelayMixin,
ConsumeTaskMixin,
TestCase,
):
@override_settings(CONSUMER_ENABLE_BARCODES=True)
@@ -632,7 +632,7 @@ class TestBarcodeNewConsume(
),
overrides,
),
"Barcode splitting complete!",
{"reason": "Barcode splitting complete!"},
)
# 2 new document consume tasks created
self.assertEqual(self.consume_file_mock.call_count, 2)
@@ -646,7 +646,7 @@ class TestBarcodeNewConsume(
for (
new_input_doc,
new_doc_overrides,
) in self.get_all_consume_delay_call_args():
) in self.get_all_consume_task_call_args():
self.assertIsFile(new_input_doc.original_file)
self.assertEqual(new_input_doc.original_path, temp_copy)
self.assertEqual(new_input_doc.source, DocumentSource.ConsumeFolder)
@@ -1049,7 +1049,7 @@ class TestTagBarcode(DirectoriesMixin, SampleDirMixin, GetReaderPluginMixin, Tes
None,
)

self.assertEqual(result, "Barcode splitting complete!")
self.assertEqual(result, {"reason": "Barcode splitting complete!"})

documents = Document.objects.all().order_by("id")
self.assertEqual(documents.count(), 3)
@@ -31,7 +31,7 @@ class TestBulkEdit(DirectoriesMixin, TestCase):
self.group1 = Group.objects.create(name="group1")
self.group2 = Group.objects.create(name="group2")

patcher = mock.patch("documents.bulk_edit.bulk_update_documents.delay")
patcher = mock.patch("documents.bulk_edit.bulk_update_documents.apply_async")
self.async_task = patcher.start()
self.addCleanup(patcher.stop)
self.c1 = Correspondent.objects.create(name="c1")
@@ -74,7 +74,7 @@ class TestBulkEdit(DirectoriesMixin, TestCase):
)
self.assertEqual(Document.objects.filter(correspondent=self.c2).count(), 3)
self.async_task.assert_called_once()
_, kwargs = self.async_task.call_args
kwargs = self.async_task.call_args.kwargs["kwargs"]
self.assertCountEqual(kwargs["document_ids"], [self.doc1.id, self.doc2.id])

def test_unset_correspondent(self) -> None:
@@ -82,7 +82,7 @@ class TestBulkEdit(DirectoriesMixin, TestCase):
bulk_edit.set_correspondent([self.doc1.id, self.doc2.id, self.doc3.id], None)
self.assertEqual(Document.objects.filter(correspondent=self.c2).count(), 0)
self.async_task.assert_called_once()
_, kwargs = self.async_task.call_args
kwargs = self.async_task.call_args.kwargs["kwargs"]
self.assertCountEqual(kwargs["document_ids"], [self.doc2.id, self.doc3.id])

def test_set_document_type(self) -> None:
@@ -93,7 +93,7 @@ class TestBulkEdit(DirectoriesMixin, TestCase):
)
self.assertEqual(Document.objects.filter(document_type=self.dt2).count(), 3)
self.async_task.assert_called_once()
_, kwargs = self.async_task.call_args
kwargs = self.async_task.call_args.kwargs["kwargs"]
self.assertCountEqual(kwargs["document_ids"], [self.doc1.id, self.doc2.id])

def test_unset_document_type(self) -> None:
@@ -101,7 +101,7 @@ class TestBulkEdit(DirectoriesMixin, TestCase):
bulk_edit.set_document_type([self.doc1.id, self.doc2.id, self.doc3.id], None)
self.assertEqual(Document.objects.filter(document_type=self.dt2).count(), 0)
self.async_task.assert_called_once()
_, kwargs = self.async_task.call_args
kwargs = self.async_task.call_args.kwargs["kwargs"]
self.assertCountEqual(kwargs["document_ids"], [self.doc2.id, self.doc3.id])

def test_set_document_storage_path(self) -> None:
@@ -123,7 +123,7 @@ class TestBulkEdit(DirectoriesMixin, TestCase):
self.assertEqual(Document.objects.filter(storage_path=None).count(), 4)

self.async_task.assert_called_once()
_, kwargs = self.async_task.call_args
kwargs = self.async_task.call_args.kwargs["kwargs"]

self.assertCountEqual(kwargs["document_ids"], [self.doc1.id])

@@ -154,7 +154,7 @@ class TestBulkEdit(DirectoriesMixin, TestCase):
self.assertEqual(Document.objects.filter(storage_path=None).count(), 5)

self.async_task.assert_called()
_, kwargs = self.async_task.call_args
kwargs = self.async_task.call_args.kwargs["kwargs"]

self.assertCountEqual(kwargs["document_ids"], [self.doc1.id])

@@ -166,7 +166,7 @@ class TestBulkEdit(DirectoriesMixin, TestCase):
)
self.assertEqual(Document.objects.filter(tags__id=self.t1.id).count(), 4)
self.async_task.assert_called_once()
_, kwargs = self.async_task.call_args
kwargs = self.async_task.call_args.kwargs["kwargs"]
self.assertCountEqual(kwargs["document_ids"], [self.doc1.id, self.doc3.id])

def test_remove_tag(self) -> None:
@@ -174,7 +174,7 @@ class TestBulkEdit(DirectoriesMixin, TestCase):
bulk_edit.remove_tag([self.doc1.id, self.doc3.id, self.doc4.id], self.t1.id)
self.assertEqual(Document.objects.filter(tags__id=self.t1.id).count(), 1)
self.async_task.assert_called_once()
_, kwargs = self.async_task.call_args
kwargs = self.async_task.call_args.kwargs["kwargs"]
self.assertCountEqual(kwargs["document_ids"], [self.doc4.id])

def test_modify_tags(self) -> None:
@@ -191,7 +191,7 @@ class TestBulkEdit(DirectoriesMixin, TestCase):
self.assertCountEqual(list(self.doc3.tags.all()), [self.t2, tag_unrelated])

self.async_task.assert_called_once()
_, kwargs = self.async_task.call_args
kwargs = self.async_task.call_args.kwargs["kwargs"]
# TODO: doc3 should not be affected, but the query for that is rather complicated
self.assertCountEqual(kwargs["document_ids"], [self.doc2.id, self.doc3.id])

@@ -248,7 +248,7 @@ class TestBulkEdit(DirectoriesMixin, TestCase):
)

self.async_task.assert_called_once()
_, kwargs = self.async_task.call_args
kwargs = self.async_task.call_args.kwargs["kwargs"]
self.assertCountEqual(kwargs["document_ids"], [self.doc1.id, self.doc2.id])

def test_modify_custom_fields_with_values(self) -> None:
@@ -325,7 +325,7 @@ class TestBulkEdit(DirectoriesMixin, TestCase):
)

self.async_task.assert_called_once()
_, kwargs = self.async_task.call_args
kwargs = self.async_task.call_args.kwargs["kwargs"]
self.assertCountEqual(kwargs["document_ids"], [self.doc1.id, self.doc2.id])

# removal of document link cf, should also remove symmetric link
@@ -428,7 +428,7 @@ class TestBulkEdit(DirectoriesMixin, TestCase):
self.assertEqual(source_doc.id, version2.id)
self.assertNotEqual(source_doc.id, version1.id)

@mock.patch("documents.tasks.bulk_update_documents.delay")
@mock.patch("documents.tasks.bulk_update_documents.apply_async")
def test_set_permissions(self, m) -> None:
doc_ids = [self.doc1.id, self.doc2.id, self.doc3.id]

@@ -467,7 +467,7 @@ class TestBulkEdit(DirectoriesMixin, TestCase):
)
self.assertEqual(groups_with_perms.count(), 1)

@mock.patch("documents.tasks.bulk_update_documents.delay")
@mock.patch("documents.tasks.bulk_update_documents.apply_async")
def test_set_permissions_merge(self, m) -> None:
doc_ids = [self.doc1.id, self.doc2.id, self.doc3.id]

@@ -643,20 +643,20 @@ class TestPDFActions(DirectoriesMixin, TestCase):
)

mock_consume_file.assert_called()
consume_file_args, _ = mock_consume_file.call_args
call_kwargs = mock_consume_file.call_args.kwargs
self.assertEqual(
Path(consume_file_args[0].original_file).name,
Path(call_kwargs["input_doc"].original_file).name,
expected_filename,
)
self.assertEqual(consume_file_args[1].title, None)
self.assertEqual(call_kwargs["overrides"].title, None)
# No metadata_document_id, delete_originals False, so ASN should be None
self.assertIsNone(consume_file_args[1].asn)
self.assertIsNone(call_kwargs["overrides"].asn)

# With metadata_document_id overrides
result = bulk_edit.merge(doc_ids, metadata_document_id=metadata_document_id)
consume_file_args, _ = mock_consume_file.call_args
self.assertEqual(consume_file_args[1].title, "B (merged)")
self.assertEqual(consume_file_args[1].created, self.doc2.created)
call_kwargs = mock_consume_file.call_args.kwargs
self.assertEqual(call_kwargs["overrides"].title, "B (merged)")
self.assertEqual(call_kwargs["overrides"].created, self.doc2.created)

self.assertEqual(result, "OK")

@@ -720,16 +720,15 @@ class TestPDFActions(DirectoriesMixin, TestCase):

mock_consume_file.assert_called()
mock_delete_documents.assert_called()
consume_sig = mock_consume_file.return_value
consume_sig.apply_async.assert_called_once()
mock_consume_file.return_value.set.return_value.apply_async.assert_called_once()

consume_file_args, _ = mock_consume_file.call_args
call_kwargs = mock_consume_file.call_args.kwargs
self.assertEqual(
Path(consume_file_args[0].original_file).name,
Path(call_kwargs["input_doc"].original_file).name,
expected_filename,
)
self.assertEqual(consume_file_args[1].title, None)
self.assertEqual(consume_file_args[1].asn, 101)
self.assertEqual(call_kwargs["overrides"].title, None)
self.assertEqual(call_kwargs["overrides"].asn, 101)

delete_documents_args, _ = mock_delete_documents.call_args
self.assertEqual(
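The chained assertion mock_consume_file.return_value.set.return_value.apply_async above mirrors, hop for hop, a call chain in the production code. A sketch under the assumption that a Celery signature is built, given headers via .set(...), and then queued:

    from unittest import mock

    consume_file_s = mock.MagicMock()  # stands in for the patched signature factory
    # Assumed production shape: consume_file.s(...).set(headers=...).apply_async()
    sig = consume_file_s("input-doc", "overrides")
    sig.set(headers={"trigger_source": "manual"}).apply_async()
    # Each call in the chain becomes a .return_value hop on the mock:
    consume_file_s.return_value.set.return_value.apply_async.assert_called_once()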
@@ -764,7 +763,7 @@ class TestPDFActions(DirectoriesMixin, TestCase):
self.doc1.archive_serial_number = 111
self.doc1.save()
sig = mock.Mock()
sig.apply_async.side_effect = Exception("boom")
sig.set.return_value.apply_async.side_effect = Exception("boom")
mock_consume_file.return_value = sig

with self.assertRaises(Exception):
@@ -801,8 +800,7 @@ class TestPDFActions(DirectoriesMixin, TestCase):
)
self.assertEqual(result, "OK")

consume_file_args, _ = mock_consume_file.call_args
self.assertEqual(consume_file_args[1].asn, 202)
self.assertEqual(mock_consume_file.call_args.kwargs["overrides"].asn, 202)

def test_restore_archive_serial_numbers_task(self) -> None:
"""
@@ -843,9 +841,8 @@ class TestPDFActions(DirectoriesMixin, TestCase):
)

mock_consume_file.assert_called()
consume_file_args, _ = mock_consume_file.call_args
self.assertEqual(
Path(consume_file_args[0].original_file).name,
Path(mock_consume_file.call_args.kwargs["input_doc"].original_file).name,
expected_filename,
)

@@ -889,9 +886,11 @@ class TestPDFActions(DirectoriesMixin, TestCase):
user = User.objects.create(username="test_user")
result = bulk_edit.split(doc_ids, pages, delete_originals=False, user=user)
self.assertEqual(mock_consume_file.call_count, 2)
consume_file_args, _ = mock_consume_file.call_args
self.assertEqual(consume_file_args[1].title, "B (split 2)")
self.assertIsNone(consume_file_args[1].asn)
self.assertEqual(
mock_consume_file.call_args.kwargs["overrides"].title,
"B (split 2)",
)
self.assertIsNone(mock_consume_file.call_args.kwargs["overrides"].asn)

self.assertEqual(result, "OK")

@@ -953,8 +952,10 @@ class TestPDFActions(DirectoriesMixin, TestCase):
self.assertEqual(result, "OK")

self.assertEqual(mock_consume_file.call_count, 2)
consume_file_args, _ = mock_consume_file.call_args
self.assertEqual(consume_file_args[1].title, "B (split 2)")
self.assertEqual(
mock_consume_file.call_args.kwargs["overrides"].title,
"B (split 2)",
)

mock_delete_documents.assert_called()
mock_chord.assert_called_once()
@@ -1001,7 +1002,7 @@ class TestPDFActions(DirectoriesMixin, TestCase):
self.doc2.refresh_from_db()
self.assertEqual(self.doc2.archive_serial_number, 222)

@mock.patch("documents.tasks.consume_file.delay")
@mock.patch("documents.tasks.consume_file.apply_async")
@mock.patch("pikepdf.Pdf.save")
def test_split_with_errors(self, mock_save_pdf, mock_consume_file) -> None:
"""
@@ -1025,7 +1026,7 @@ class TestPDFActions(DirectoriesMixin, TestCase):

mock_consume_file.assert_not_called()

@mock.patch("documents.tasks.consume_file.delay")
@mock.patch("documents.tasks.consume_file.apply_async")
def test_rotate(self, mock_consume_delay):
"""
GIVEN:
@@ -1042,12 +1043,12 @@ class TestPDFActions(DirectoriesMixin, TestCase):
mock_consume_delay.call_args_list,
doc_ids,
):
consumable, overrides = call.args
self.assertEqual(consumable.root_document_id, expected_id)
self.assertIsNotNone(overrides)
task_kwargs = call.kwargs["kwargs"]
self.assertEqual(task_kwargs["input_doc"].root_document_id, expected_id)
self.assertIsNotNone(task_kwargs["overrides"])
self.assertEqual(result, "OK")

@mock.patch("documents.tasks.consume_file.delay")
@mock.patch("documents.tasks.consume_file.apply_async")
@mock.patch("pikepdf.Pdf.save")
def test_rotate_with_error(
self,
@@ -1073,7 +1074,7 @@ class TestPDFActions(DirectoriesMixin, TestCase):
self.assertIn(expected_str, error_str)
mock_consume_delay.assert_not_called()

@mock.patch("documents.tasks.consume_file.delay")
@mock.patch("documents.tasks.consume_file.apply_async")
def test_rotate_non_pdf(
self,
mock_consume_delay,
@@ -1091,13 +1092,13 @@ class TestPDFActions(DirectoriesMixin, TestCase):
expected_str = f"Document {self.img_doc.id} is not a PDF, skipping rotation"
self.assertTrue(any(expected_str in line for line in cm.output))
self.assertEqual(mock_consume_delay.call_count, 1)
consumable, overrides = mock_consume_delay.call_args[0]
self.assertEqual(consumable.root_document_id, self.doc2.id)
self.assertIsNotNone(overrides)
task_kwargs = mock_consume_delay.call_args.kwargs["kwargs"]
self.assertEqual(task_kwargs["input_doc"].root_document_id, self.doc2.id)
self.assertIsNotNone(task_kwargs["overrides"])
self.assertEqual(result, "OK")

@mock.patch("documents.data_models.magic.from_file", return_value="application/pdf")
@mock.patch("documents.tasks.consume_file.delay")
@mock.patch("documents.tasks.consume_file.apply_async")
@mock.patch("pikepdf.open")
def test_rotate_explicit_selection_uses_root_source_when_root_selected(
self,
@@ -1124,7 +1125,7 @@ class TestPDFActions(DirectoriesMixin, TestCase):
mock_open.assert_called_once_with(self.doc2.source_path)
mock_consume_delay.assert_called_once()

@mock.patch("documents.tasks.consume_file.delay")
@mock.patch("documents.tasks.consume_file.apply_async")
@mock.patch("pikepdf.Pdf.save")
@mock.patch("documents.data_models.magic.from_file", return_value="application/pdf")
def test_delete_pages(self, mock_magic, mock_pdf_save, mock_consume_delay):
@@ -1142,14 +1143,16 @@ class TestPDFActions(DirectoriesMixin, TestCase):
result = bulk_edit.delete_pages(doc_ids, pages)
mock_pdf_save.assert_called_once()
mock_consume_delay.assert_called_once()
consumable, overrides = mock_consume_delay.call_args[0]
self.assertEqual(consumable.root_document_id, self.doc2.id)
self.assertTrue(str(consumable.original_file).endswith("_pages_deleted.pdf"))
self.assertIsNotNone(overrides)
task_kwargs = mock_consume_delay.call_args.kwargs["kwargs"]
self.assertEqual(task_kwargs["input_doc"].root_document_id, self.doc2.id)
self.assertTrue(
str(task_kwargs["input_doc"].original_file).endswith("_pages_deleted.pdf"),
)
self.assertIsNotNone(task_kwargs["overrides"])
self.assertEqual(result, "OK")

@mock.patch("documents.data_models.magic.from_file", return_value="application/pdf")
@mock.patch("documents.tasks.consume_file.delay")
@mock.patch("documents.tasks.consume_file.apply_async")
@mock.patch("pikepdf.open")
def test_delete_pages_explicit_selection_uses_root_source_when_root_selected(
self,
@@ -1176,7 +1179,7 @@ class TestPDFActions(DirectoriesMixin, TestCase):
mock_open.assert_called_once_with(self.doc2.source_path)
mock_consume_delay.assert_called_once()

@mock.patch("documents.tasks.consume_file.delay")
@mock.patch("documents.tasks.consume_file.apply_async")
@mock.patch("pikepdf.Pdf.save")
def test_delete_pages_with_error(self, mock_pdf_save, mock_consume_delay):
"""
@@ -1259,8 +1262,7 @@ class TestPDFActions(DirectoriesMixin, TestCase):
result = bulk_edit.edit_pdf(doc_ids, operations, delete_original=True)
self.assertEqual(result, "OK")
mock_chord.assert_called_once()
consume_file_args, _ = mock_consume_file.call_args
self.assertEqual(consume_file_args[1].asn, 250)
self.assertEqual(mock_consume_file.call_args.kwargs["overrides"].asn, 250)
self.doc2.refresh_from_db()
self.assertIsNone(self.doc2.archive_serial_number)

@@ -1297,7 +1299,7 @@ class TestPDFActions(DirectoriesMixin, TestCase):
self.doc2.refresh_from_db()
self.assertEqual(self.doc2.archive_serial_number, 333)

@mock.patch("documents.tasks.consume_file.delay")
@mock.patch("documents.tasks.consume_file.apply_async")
def test_edit_pdf_with_update_document(self, mock_consume_delay):
"""
GIVEN:
@@ -1319,13 +1321,15 @@ class TestPDFActions(DirectoriesMixin, TestCase):

self.assertEqual(result, "OK")
mock_consume_delay.assert_called_once()
consumable, overrides = mock_consume_delay.call_args[0]
self.assertEqual(consumable.root_document_id, self.doc2.id)
self.assertTrue(str(consumable.original_file).endswith("_edited.pdf"))
self.assertIsNotNone(overrides)
task_kwargs = mock_consume_delay.call_args.kwargs["kwargs"]
self.assertEqual(task_kwargs["input_doc"].root_document_id, self.doc2.id)
self.assertTrue(
str(task_kwargs["input_doc"].original_file).endswith("_edited.pdf"),
)
self.assertIsNotNone(task_kwargs["overrides"])

@mock.patch("documents.data_models.magic.from_file", return_value="application/pdf")
@mock.patch("documents.tasks.consume_file.delay")
@mock.patch("documents.tasks.consume_file.apply_async")
@mock.patch("pikepdf.new")
@mock.patch("pikepdf.open")
def test_edit_pdf_explicit_selection_uses_root_source_when_root_selected(
@@ -1433,7 +1437,7 @@ class TestPDFActions(DirectoriesMixin, TestCase):
mock_consume_file.assert_not_called()

@mock.patch("documents.bulk_edit.update_document_content_maybe_archive_file.delay")
@mock.patch("documents.tasks.consume_file.delay")
@mock.patch("documents.tasks.consume_file.apply_async")
@mock.patch("documents.bulk_edit.tempfile.mkdtemp")
@mock.patch("pikepdf.open")
def test_remove_password_update_document(
@@ -1468,18 +1472,18 @@ class TestPDFActions(DirectoriesMixin, TestCase):
fake_pdf.remove_unreferenced_resources.assert_called_once()
mock_update_document.assert_not_called()
mock_consume_delay.assert_called_once()
consumable, overrides = mock_consume_delay.call_args[0]
task_kwargs = mock_consume_delay.call_args.kwargs["kwargs"]
expected_path = temp_dir / f"{doc.id}_unprotected.pdf"
self.assertTrue(expected_path.exists())
self.assertEqual(
Path(consumable.original_file).resolve(),
Path(task_kwargs["input_doc"].original_file).resolve(),
expected_path.resolve(),
)
self.assertEqual(consumable.root_document_id, doc.id)
self.assertIsNotNone(overrides)
self.assertEqual(task_kwargs["input_doc"].root_document_id, doc.id)
self.assertIsNotNone(task_kwargs["overrides"])

@mock.patch("documents.data_models.magic.from_file", return_value="application/pdf")
@mock.patch("documents.tasks.consume_file.delay")
@mock.patch("documents.tasks.consume_file.apply_async")
@mock.patch("pikepdf.open")
def test_remove_password_explicit_selection_uses_root_source_when_root_selected(
self,
@@ -1548,9 +1552,9 @@ class TestPDFActions(DirectoriesMixin, TestCase):
self.assertEqual(result, "OK")
mock_open.assert_called_once_with(doc.source_path, password="secret")
mock_consume_file.assert_called_once()
consume_args, _ = mock_consume_file.call_args
consumable_document = consume_args[0]
overrides = consume_args[1]
call_kwargs = mock_consume_file.call_args.kwargs
consumable_document = call_kwargs["input_doc"]
overrides = call_kwargs["overrides"]
expected_path = temp_dir / f"{doc.id}_unprotected.pdf"
self.assertTrue(expected_path.exists())
self.assertEqual(
@@ -1558,7 +1562,9 @@ class TestPDFActions(DirectoriesMixin, TestCase):
expected_path.resolve(),
)
self.assertEqual(overrides.owner_id, user.id)
mock_group.assert_called_once_with([mock_consume_file.return_value])
mock_group.assert_called_once_with(
[mock_consume_file.return_value.set.return_value],
)
mock_group.return_value.delay.assert_called_once()
mock_chord.assert_not_called()
@@ -83,7 +83,7 @@ class TestDoubleSided(DirectoriesMixin, FileSystemAssertsMixin, TestCase):
dt.datetime.now(),
delta=dt.timedelta(seconds=5),
)
self.assertIn("Received odd numbered pages", msg)
self.assertIn("Received odd numbered pages", msg["reason"])

def test_collation(self) -> None:
"""
@@ -129,7 +129,7 @@ class TestDoubleSided(DirectoriesMixin, FileSystemAssertsMixin, TestCase):
)
msg = self.consume_file("double-sided-odd.pdf")
self.assertIsFile(self.staging_file)
self.assertIn("Received odd numbered pages", msg)
self.assertIn("Received odd numbered pages", msg["reason"])

def test_less_odd_pages_then_even_fails(self) -> None:
"""
@@ -212,7 +212,7 @@ class TestDoubleSided(DirectoriesMixin, FileSystemAssertsMixin, TestCase):
"""
msg = self.consume_file("simple.pdf", Path("..") / "simple.pdf")
self.assertIsNotFile(self.staging_file)
self.assertRegex(msg, r"Success. New document id \d+ created")
self.assertIsInstance(msg.get("document_id"), int)

def test_subdirectory_upload(self) -> None:
"""
@@ -252,4 +252,4 @@ class TestDoubleSided(DirectoriesMixin, FileSystemAssertsMixin, TestCase):
"""
msg = self.consume_file("simple.pdf")
self.assertIsNotFile(self.staging_file)
self.assertRegex(msg, r"Success. New document id \d+ created")
self.assertIsInstance(msg.get("document_id"), int)
@@ -97,12 +97,10 @@ def consumer_filter() -> ConsumerFilter:

@pytest.fixture
def mock_consume_file_delay(mocker: MockerFixture) -> MagicMock:
"""Mock the consume_file.delay celery task."""
mock_task = mocker.patch(
"""Mock the consume_file task."""
return mocker.patch(
"documents.management.commands.document_consumer.consume_file",
)
mock_task.delay = mocker.MagicMock()
return mock_task


@pytest.fixture
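The simplified fixture works because mocker.patch returns a MagicMock whose attributes are created on first access, so the old manual mock_task.delay = mocker.MagicMock() line was redundant; .apply_async is reachable the same way. A minimal illustration:

    from unittest import mock

    mock_task = mock.MagicMock()        # what mocker.patch(...) hands back
    mock_task.apply_async(kwargs={})    # auto-created child mock, no setup needed
    mock_task.apply_async.assert_called_once()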
@@ -453,9 +451,9 @@ class TestConsumeFile:
subdirs_as_tags=False,
)

mock_consume_file_delay.delay.assert_called_once()
call_args = mock_consume_file_delay.delay.call_args
consumable_doc = call_args[0][0]
mock_consume_file_delay.apply_async.assert_called_once()
call_args = mock_consume_file_delay.apply_async.call_args
consumable_doc = call_args.kwargs["kwargs"]["input_doc"]
assert isinstance(consumable_doc, ConsumableDocument)
assert consumable_doc.original_file == target
assert consumable_doc.source == DocumentSource.ConsumeFolder
@@ -471,7 +469,7 @@ class TestConsumeFile:
consumption_dir=consumption_dir,
subdirs_as_tags=False,
)
mock_consume_file_delay.delay.assert_not_called()
mock_consume_file_delay.apply_async.assert_not_called()

def test_consume_directory(
self,
@@ -487,7 +485,7 @@ class TestConsumeFile:
consumption_dir=consumption_dir,
subdirs_as_tags=False,
)
mock_consume_file_delay.delay.assert_not_called()
mock_consume_file_delay.apply_async.assert_not_called()

def test_consume_with_permission_error(
self,
@@ -506,7 +504,7 @@ class TestConsumeFile:
consumption_dir=consumption_dir,
subdirs_as_tags=False,
)
mock_consume_file_delay.delay.assert_not_called()
mock_consume_file_delay.apply_async.assert_not_called()

def test_consume_with_tags_error(
self,
@@ -529,9 +527,9 @@ class TestConsumeFile:
consumption_dir=consumption_dir,
subdirs_as_tags=True,
)
mock_consume_file_delay.delay.assert_called_once()
call_args = mock_consume_file_delay.delay.call_args
overrides = call_args[0][1]
mock_consume_file_delay.apply_async.assert_called_once()
call_args = mock_consume_file_delay.apply_async.call_args
overrides = call_args.kwargs["kwargs"]["overrides"]
assert overrides.tag_ids is None


@@ -629,7 +627,7 @@ class TestCommandOneshot:
cmd = Command()
cmd.handle(directory=str(consumption_dir), oneshot=True, testing=False)

mock_consume_file_delay.delay.assert_called_once()
mock_consume_file_delay.apply_async.assert_called_once()

def test_processes_recursive(
self,
@@ -652,7 +650,7 @@ class TestCommandOneshot:
cmd = Command()
cmd.handle(directory=str(consumption_dir), oneshot=True, testing=False)

mock_consume_file_delay.delay.assert_called_once()
mock_consume_file_delay.apply_async.assert_called_once()

def test_ignores_unsupported_extensions(
self,
@@ -671,7 +669,7 @@ class TestCommandOneshot:
cmd = Command()
cmd.handle(directory=str(consumption_dir), oneshot=True, testing=False)

mock_consume_file_delay.delay.assert_not_called()
mock_consume_file_delay.apply_async.assert_not_called()


class ConsumerThread(Thread):
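The watcher tests below poll with a wait_for_mock_call helper whose definition is not part of this diff; a plausible shape (an assumption, not the project's actual code) is a simple timeout loop:

    from time import monotonic, sleep
    from unittest.mock import MagicMock

    def wait_for_mock_call(mocked: MagicMock, timeout_s: float = 2.0) -> None:
        # Block until the mock has been called at least once or the timeout elapses.
        deadline = monotonic() + timeout_s
        while monotonic() < deadline and mocked.call_count == 0:
            sleep(0.05)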
@@ -795,12 +793,12 @@ class TestCommandWatch:
|
||||
target = consumption_dir / "document.pdf"
|
||||
shutil.copy(sample_pdf, target)
|
||||
|
||||
wait_for_mock_call(mock_consume_file_delay.delay, timeout_s=2.0)
|
||||
wait_for_mock_call(mock_consume_file_delay.apply_async, timeout_s=2.0)
|
||||
|
||||
if thread.exception:
|
||||
raise thread.exception
|
||||
|
||||
mock_consume_file_delay.delay.assert_called()
|
||||
mock_consume_file_delay.apply_async.assert_called()
|
||||
|
||||
def test_detects_moved_file(
|
||||
self,
|
||||
@@ -821,12 +819,12 @@ class TestCommandWatch:
|
||||
target = consumption_dir / "document.pdf"
|
||||
shutil.move(temp_location, target)
|
||||
|
||||
wait_for_mock_call(mock_consume_file_delay.delay, timeout_s=2.0)
|
||||
wait_for_mock_call(mock_consume_file_delay.apply_async, timeout_s=2.0)
|
||||
|
||||
if thread.exception:
|
||||
raise thread.exception
|
||||
|
||||
mock_consume_file_delay.delay.assert_called()
|
||||
mock_consume_file_delay.apply_async.assert_called()
|
||||
|
||||
def test_handles_slow_write(
|
||||
self,
|
||||
@@ -847,12 +845,12 @@ class TestCommandWatch:
|
||||
f.flush()
|
||||
sleep(0.05)
|
||||
|
||||
wait_for_mock_call(mock_consume_file_delay.delay, timeout_s=2.0)
|
||||
wait_for_mock_call(mock_consume_file_delay.apply_async, timeout_s=2.0)
|
||||
|
||||
if thread.exception:
|
||||
raise thread.exception
|
||||
|
||||
mock_consume_file_delay.delay.assert_called()
|
||||
mock_consume_file_delay.apply_async.assert_called()
|
||||
|
||||
def test_ignores_macos_files(
|
||||
self,
|
||||
@@ -868,13 +866,15 @@ class TestCommandWatch:
|
||||
(consumption_dir / "._document.pdf").write_bytes(b"test")
|
||||
shutil.copy(sample_pdf, consumption_dir / "valid.pdf")
|
||||
|
||||
wait_for_mock_call(mock_consume_file_delay.delay, timeout_s=2.0)
|
||||
wait_for_mock_call(mock_consume_file_delay.apply_async, timeout_s=2.0)
|
||||
|
||||
if thread.exception:
|
||||
raise thread.exception
|
||||
|
||||
assert mock_consume_file_delay.delay.call_count == 1
|
||||
call_args = mock_consume_file_delay.delay.call_args[0][0]
|
||||
assert mock_consume_file_delay.apply_async.call_count == 1
|
||||
call_args = mock_consume_file_delay.apply_async.call_args.kwargs["kwargs"][
|
||||
"input_doc"
|
||||
]
|
||||
assert call_args.original_file.name == "valid.pdf"
|
||||
|
||||
@pytest.mark.django_db
|
||||
@@ -924,12 +924,12 @@ class TestCommandWatchPolling:
|
||||
|
||||
# Actively wait for consumption
|
||||
# Polling needs: interval (0.5s) + stability (0.1s) + next poll (0.5s) + margin
|
||||
wait_for_mock_call(mock_consume_file_delay.delay, timeout_s=5.0)
|
||||
wait_for_mock_call(mock_consume_file_delay.apply_async, timeout_s=5.0)
|
||||
|
||||
if thread.exception:
|
||||
raise thread.exception
|
||||
|
||||
mock_consume_file_delay.delay.assert_called()
|
||||
mock_consume_file_delay.apply_async.assert_called()
|
||||
|
||||
|
||||
@pytest.mark.management
|
||||
@@ -953,12 +953,12 @@ class TestCommandWatchRecursive:
|
||||
target = subdir / "document.pdf"
|
||||
shutil.copy(sample_pdf, target)
|
||||
|
||||
wait_for_mock_call(mock_consume_file_delay.delay, timeout_s=2.0)
|
||||
wait_for_mock_call(mock_consume_file_delay.apply_async, timeout_s=2.0)
|
||||
|
||||
if thread.exception:
|
||||
raise thread.exception
|
||||
|
||||
mock_consume_file_delay.delay.assert_called()
|
||||
mock_consume_file_delay.apply_async.assert_called()
|
||||
|
||||
def test_subdirs_as_tags(
|
||||
self,
|
||||
@@ -983,15 +983,15 @@ class TestCommandWatchRecursive:
|
||||
target = subdir / "document.pdf"
|
||||
shutil.copy(sample_pdf, target)
|
||||
|
||||
wait_for_mock_call(mock_consume_file_delay.delay, timeout_s=2.0)
|
||||
wait_for_mock_call(mock_consume_file_delay.apply_async, timeout_s=2.0)
|
||||
|
||||
if thread.exception:
|
||||
raise thread.exception
|
||||
|
||||
mock_consume_file_delay.delay.assert_called()
|
||||
mock_consume_file_delay.apply_async.assert_called()
|
||||
mock_tags.assert_called()
|
||||
call_args = mock_consume_file_delay.delay.call_args
|
||||
overrides = call_args[0][1]
|
||||
call_args = mock_consume_file_delay.apply_async.call_args
|
||||
overrides = call_args.kwargs["kwargs"]["overrides"]
|
||||
assert overrides.tag_ids is not None
|
||||
assert len(overrides.tag_ids) == 2
|
||||
|
||||
@@ -1021,7 +1021,7 @@ class TestCommandWatchEdgeCases:
|
||||
if thread.exception:
|
||||
raise thread.exception
|
||||
|
||||
mock_consume_file_delay.delay.assert_not_called()
|
||||
mock_consume_file_delay.apply_async.assert_not_called()
|
||||
|
||||
@pytest.mark.usefixtures("mock_supported_extensions")
|
||||
def test_handles_task_exception(
|
||||
@@ -1035,7 +1035,7 @@ class TestCommandWatchEdgeCases:
|
||||
mock_task = mocker.patch(
|
||||
"documents.management.commands.document_consumer.consume_file",
|
||||
)
|
||||
mock_task.delay.side_effect = Exception("Task error")
|
||||
mock_task.apply_async.side_effect = Exception("Task error")
|
||||
|
||||
thread = ConsumerThread(consumption_dir, scratch_dir)
|
||||
try:
|
||||
|
||||
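A quick standalone sketch (not project code) of why these assertions move from delay to apply_async: delay() is positional shorthand that cannot attach message headers, while apply_async() accepts an explicit kwargs dict plus headers, which the new trigger_source plumbing relies on. The "web_ui" string stands in for the PaperlessTask.TriggerSource enum value.

from celery import Celery

app = Celery("demo")
app.conf.task_always_eager = True  # run inline; no broker needed for this sketch

@app.task
def consume_file(input_doc=None, overrides=None):
    return input_doc

# Old style: positional shorthand, no way to attach message headers.
consume_file.delay("doc.pdf", None)

# New style: explicit kwargs plus headers, mirroring the calls under test.
consume_file.apply_async(
    kwargs={"input_doc": "doc.pdf", "overrides": None},
    headers={"trigger_source": "web_ui"},  # stand-in enum value
)
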
@@ -31,7 +31,7 @@ class ShareLinkBundleAPITests(DirectoriesMixin, APITestCase):
self.client.force_authenticate(self.user)
self.document = DocumentFactory.create()

@mock.patch("documents.views.build_share_link_bundle.delay")
@mock.patch("documents.views.build_share_link_bundle.apply_async")
def test_create_bundle_triggers_build_job(self, delay_mock) -> None:
payload = {
"document_ids": [self.document.pk],
@@ -45,7 +45,8 @@ class ShareLinkBundleAPITests(DirectoriesMixin, APITestCase):
bundle = ShareLinkBundle.objects.get(pk=response.data["id"])
self.assertEqual(bundle.documents.count(), 1)
self.assertEqual(bundle.status, ShareLinkBundle.Status.PENDING)
delay_mock.assert_called_once_with(bundle.pk)
delay_mock.assert_called_once()
self.assertEqual(delay_mock.call_args.kwargs["kwargs"]["bundle_id"], bundle.pk)

def test_create_bundle_rejects_missing_documents(self) -> None:
payload = {
@@ -73,7 +74,7 @@ class ShareLinkBundleAPITests(DirectoriesMixin, APITestCase):
self.assertIn("document_ids", response.data)
perms_mock.assert_called()

@mock.patch("documents.views.build_share_link_bundle.delay")
@mock.patch("documents.views.build_share_link_bundle.apply_async")
def test_rebuild_bundle_resets_state(self, delay_mock) -> None:
bundle = ShareLinkBundle.objects.create(
slug="rebuild-slug",
@@ -94,7 +95,8 @@ class ShareLinkBundleAPITests(DirectoriesMixin, APITestCase):
self.assertIsNone(bundle.last_error)
self.assertIsNone(bundle.size_bytes)
self.assertEqual(bundle.file_path, "")
delay_mock.assert_called_once_with(bundle.pk)
delay_mock.assert_called_once()
self.assertEqual(delay_mock.call_args.kwargs["kwargs"]["bundle_id"], bundle.pk)

def test_rebuild_bundle_rejects_processing_status(self) -> None:
bundle = ShareLinkBundle.objects.create(

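These assertions dig two levels deep because apply_async is itself invoked with a kwargs= keyword, so the task's own arguments sit nested under the outer call's "kwargs" key. A minimal mock sketch (names are placeholders):

from unittest import mock

apply_async = mock.Mock()
apply_async(kwargs={"bundle_id": 42}, headers={"trigger_source": "manual"})

apply_async.assert_called_once()
# The task's arguments live under the outer call's "kwargs" keyword.
assert apply_async.call_args.kwargs["kwargs"]["bundle_id"] == 42
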
@@ -23,7 +23,7 @@ class TestTagHierarchy(DirectoriesMixin, APITestCase):
self.parent = Tag.objects.create(name="Parent")
self.child = Tag.objects.create(name="Child", tn_parent=self.parent)

patcher = mock.patch("documents.bulk_edit.bulk_update_documents.delay")
patcher = mock.patch("documents.bulk_edit.bulk_update_documents.apply_async")
self.async_task = patcher.start()
self.addCleanup(patcher.stop)


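The patcher.start()/addCleanup(patcher.stop) idiom used in this setUp, shown standalone (the patch target here is an arbitrary stand-in, not the project's):

import unittest
from unittest import mock

class PatcherDemo(unittest.TestCase):
    def setUp(self):
        patcher = mock.patch("os.getcwd")  # stand-in target
        self.mocked = patcher.start()
        # addCleanup runs even if a later setUp step or the test itself fails,
        # unlike a stop() placed only in tearDown.
        self.addCleanup(patcher.stop)

    def test_patched(self):
        import os
        os.getcwd()
        self.mocked.assert_called_once()
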
@@ -59,8 +59,9 @@ class TestBeforeTaskPublishHandler:
def test_creates_task_for_consume_file(self, consume_input_doc, consume_overrides):
task_id = send_publish(
"documents.tasks.consume_file",
(consume_input_doc, consume_overrides),
{},
(),
{"input_doc": consume_input_doc, "overrides": consume_overrides},
headers={"trigger_source": PaperlessTask.TriggerSource.WEB_UI},
)
task = PaperlessTask.objects.get(task_id=task_id)
assert task.task_type == PaperlessTask.TaskType.CONSUME_FILE
@@ -102,8 +103,8 @@ class TestBeforeTaskPublishHandler:

task_id = send_publish(
"documents.tasks.consume_file",
(consume_input_doc, overrides),
{},
(),
{"input_doc": consume_input_doc, "overrides": overrides},
)

task = PaperlessTask.objects.get(task_id=task_id)
@@ -116,8 +117,8 @@ class TestBeforeTaskPublishHandler:

task_id = send_publish(
"documents.tasks.consume_file",
(consume_input_doc, overrides),
{},
(),
{"input_doc": consume_input_doc, "overrides": overrides},
)

task = PaperlessTask.objects.get(task_id=task_id)
@@ -167,37 +168,19 @@ class TestBeforeTaskPublishHandler:
before_task_publish_handler(sender=None, headers=None, body=None)
assert PaperlessTask.objects.count() == 0

@pytest.mark.parametrize(
("document_source", "expected_trigger_source"),
[
pytest.param(
DocumentSource.ConsumeFolder,
PaperlessTask.TriggerSource.FOLDER_CONSUME,
id="folder_consume",
),
pytest.param(
DocumentSource.MailFetch,
PaperlessTask.TriggerSource.EMAIL_CONSUME,
id="email_consume",
),
],
)
def test_consume_document_source_maps_to_trigger_source(
def test_consume_file_without_trigger_source_header_defaults_to_manual(
self,
consume_input_doc,
consume_overrides,
document_source: DocumentSource,
expected_trigger_source: PaperlessTask.TriggerSource,
) -> None:
"""DocumentSource on the input doc maps to the correct TriggerSource."""
consume_input_doc.source = document_source
"""Without a trigger_source header the handler defaults to MANUAL."""
task_id = send_publish(
"documents.tasks.consume_file",
(consume_input_doc, consume_overrides),
{},
(),
{"input_doc": consume_input_doc, "overrides": consume_overrides},
)
task = PaperlessTask.objects.get(task_id=task_id)
assert task.trigger_source == expected_trigger_source
assert task.trigger_source == PaperlessTask.TriggerSource.MANUAL


@pytest.mark.django_db
@@ -255,30 +238,38 @@ class TestTaskPostrunHandler:
task.refresh_from_db()
assert task.status == PaperlessTask.Status.STARTED

def test_parses_legacy_new_document_string(self):
task = self._started_task()
def test_records_success_with_consume_result(self):
"""ConsumeFileSuccessResult dict is stored directly as result_data."""
from documents.data_models import ConsumeFileSuccessResult

task = self._started_task()
task_postrun_handler(
task_id=task.task_id,
retval="New document id 42 created",
retval=ConsumeFileSuccessResult(document_id=42),
state="SUCCESS",
)
task.refresh_from_db()
assert task.result_data["document_id"] == 42
assert task.result_message == "New document id 42 created"
assert task.result_data == {"document_id": 42}

def test_records_stopped_with_reason(self):
"""ConsumeFileStoppedResult dict is stored directly as result_data."""
from documents.data_models import ConsumeFileStoppedResult

def test_parses_duplicate_string(self):
"""Duplicate detection returns a string with SUCCESS state (StopConsumeTaskError is caught and returned, not raised)."""
task = self._started_task()

task_postrun_handler(
task_id=task.task_id,
retval="It is a duplicate of some document (#99).",
retval=ConsumeFileStoppedResult(reason="Barcode splitting complete!"),
state="SUCCESS",
)
task.refresh_from_db()
assert task.result_data["duplicate_of"] == 99
assert task.result_data["duplicate_in_trash"] is False
assert task.result_data == {"reason": "Barcode splitting complete!"}

def test_none_retval_stores_no_result_data(self):
"""None return value (non-consume tasks) leaves result_data untouched."""
task = self._started_task()
task_postrun_handler(task_id=task.task_id, retval=None, state="SUCCESS")
task.refresh_from_db()
assert task.result_data is None

def test_ignores_unknown_task_id(self):


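A plausible shape for the handler these tests exercise (assumed, not copied from the project): a before_task_publish receiver that reads the custom trigger_source header and falls back to MANUAL when the header is missing.

from celery.signals import before_task_publish

@before_task_publish.connect
def record_queued_task(sender=None, headers=None, body=None, **kwargs):
    # sender is the task name; headers may be None for non-Celery publishers.
    if sender != "documents.tasks.consume_file":
        return
    trigger_source = (headers or {}).get("trigger_source") or "manual"
    print(f"{sender} queued via {trigger_source}")
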
@@ -3636,7 +3636,7 @@ class TestWorkflows(
PAPERLESS_FORCE_SCRIPT_NAME="/paperless",
BASE_URL="/paperless/",
)
@mock.patch("documents.workflows.webhooks.send_webhook.delay")
@mock.patch("documents.workflows.webhooks.send_webhook.apply_async")
def test_workflow_webhook_action_body(self, mock_post) -> None:
"""
GIVEN:
@@ -3685,20 +3685,22 @@ class TestWorkflows(
run_workflows(WorkflowTrigger.WorkflowTriggerType.DOCUMENT_UPDATED, doc)

mock_post.assert_called_once_with(
url="http://paperless-ngx.com",
data=(
f"Test message: http://localhost:8000/paperless/documents/{doc.id}/"
f" with id {doc.id}"
),
headers={},
files=None,
as_json=False,
kwargs={
"url": "http://paperless-ngx.com",
"data": (
f"Test message: http://localhost:8000/paperless/documents/{doc.id}/"
f" with id {doc.id}"
),
"headers": {},
"files": None,
"as_json": False,
},
)

@override_settings(
PAPERLESS_URL="http://localhost:8000",
)
@mock.patch("documents.workflows.webhooks.send_webhook.delay")
@mock.patch("documents.workflows.webhooks.send_webhook.apply_async")
def test_workflow_webhook_action_w_files(self, mock_post) -> None:
"""
GIVEN:
@@ -3750,11 +3752,13 @@ class TestWorkflows(
run_workflows(WorkflowTrigger.WorkflowTriggerType.DOCUMENT_UPDATED, doc)

mock_post.assert_called_once_with(
url="http://paperless-ngx.com",
data=f"Test message: http://localhost:8000/documents/{doc.id}/",
headers={},
files={"file": ("simple.pdf", mock.ANY, "application/pdf")},
as_json=False,
kwargs={
"url": "http://paperless-ngx.com",
"data": f"Test message: http://localhost:8000/documents/{doc.id}/",
"headers": {},
"files": {"file": ("simple.pdf", mock.ANY, "application/pdf")},
"as_json": False,
},
)

@mock.patch("documents.signals.handlers.execute_webhook_action")
@@ -4036,7 +4040,7 @@ class TestWorkflows(
)
self.assertIn(expected_str, cm.output[0])

@mock.patch("documents.workflows.webhooks.send_webhook.delay")
@mock.patch("documents.workflows.webhooks.send_webhook.apply_async")
def test_workflow_webhook_action_consumption(self, mock_post) -> None:
"""
GIVEN:
@@ -4376,7 +4380,7 @@ class TestWorkflows(
@override_settings(
PAPERLESS_URL="http://localhost:8000",
)
@mock.patch("documents.workflows.webhooks.send_webhook.delay")
@mock.patch("documents.workflows.webhooks.send_webhook.apply_async")
def test_workflow_trash_with_webhook_action(self, mock_webhook_delay):
"""
GIVEN:
@@ -4384,7 +4388,7 @@ class TestWorkflows(
WHEN:
- Document matches and workflow runs
THEN:
- Webhook .delay() is called with complete data including file bytes
- Webhook .apply_async() is called with complete data including file bytes
- Document is moved to trash (soft deleted)
- Webhook task has all necessary data and doesn't rely on document existence
"""
@@ -4434,7 +4438,7 @@ class TestWorkflows(
run_workflows(WorkflowTrigger.WorkflowTriggerType.DOCUMENT_UPDATED, doc)

mock_webhook_delay.assert_called_once()
call_kwargs = mock_webhook_delay.call_args[1]
call_kwargs = mock_webhook_delay.call_args[1]["kwargs"]
self.assertEqual(call_kwargs["url"], "https://paperless-ngx.com/webhook")
self.assertEqual(
call_kwargs["data"],

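One detail worth keeping in mind when reading these assertions: for a recorded mock call, call_args[1] is the keyword-argument dict, so call_args[1]["kwargs"] and call_args.kwargs["kwargs"] address the same payload. Standalone check:

from unittest import mock

send_webhook = mock.Mock()
send_webhook.apply_async(kwargs={"url": "https://example.com", "as_json": False})

call = send_webhook.apply_async.call_args
# Index 1 and the .kwargs attribute are two views of the same dict.
assert call[1]["kwargs"] is call.kwargs["kwargs"]
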
@@ -231,14 +231,16 @@ class ConsumerProgressMixin:
self.send_progress_patcher.stop()


class DocumentConsumeDelayMixin:
class ConsumeTaskMixin:
"""
Provides mocking of the consume_file asynchronous task and useful utilities
for decoding its arguments
"""

def setUp(self) -> None:
self.consume_file_patcher = mock.patch("documents.tasks.consume_file.delay")
self.consume_file_patcher = mock.patch(
"documents.tasks.consume_file.apply_async",
)
self.consume_file_mock = self.consume_file_patcher.start()
super().setUp()

@@ -246,48 +248,22 @@ class DocumentConsumeDelayMixin:
super().tearDown()
self.consume_file_patcher.stop()

def get_last_consume_delay_call_args(
def assert_queue_consumption_task_call_args(
self,
) -> tuple[ConsumableDocument, DocumentMetadataOverrides]:
"""
Returns the most recent arguments to the async task
"""
# Must be at least 1 call
self.consume_file_mock.assert_called()
"""Assert the task was queued exactly once and return its call args."""
self.consume_file_mock.assert_called_once()
task_kwargs = self.consume_file_mock.call_args.kwargs["kwargs"]
return (task_kwargs["input_doc"], task_kwargs["overrides"])

args, _ = self.consume_file_mock.call_args
input_doc, overrides = args

return (input_doc, overrides)

def get_all_consume_delay_call_args(
def get_all_consume_task_call_args(
self,
) -> Iterator[tuple[ConsumableDocument, DocumentMetadataOverrides]]:
"""
Iterates over all calls to the async task and returns the arguments
"""
# Must be at least 1 call
"""Iterate over all queued consume task calls and yield their call args."""
self.consume_file_mock.assert_called()

for args, kwargs in self.consume_file_mock.call_args_list:
input_doc, overrides = args

yield (input_doc, overrides)

def get_specific_consume_delay_call_args(
self,
index: int,
) -> tuple[ConsumableDocument, DocumentMetadataOverrides]:
"""
Returns the arguments of a specific call to the async task
"""
# Must be at least 1 call
self.consume_file_mock.assert_called()

args, _ = self.consume_file_mock.call_args_list[index]
input_doc, overrides = args

return (input_doc, overrides)
for call in self.consume_file_mock.call_args_list:
task_kwargs = call.kwargs["kwargs"]
yield (task_kwargs["input_doc"], task_kwargs["overrides"])


class TestMigrations(TransactionTestCase):

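A standalone sketch of the unpacking the renamed helper performs; the loop mirrors get_all_consume_task_call_args against a plain Mock (file names are placeholders):

from unittest import mock

consume_file_mock = mock.Mock()
for name in ("a.pdf", "b.pdf"):
    consume_file_mock(kwargs={"input_doc": name, "overrides": None})

# Each recorded call nests the task arguments under the kwargs= keyword.
docs = [
    call.kwargs["kwargs"]["input_doc"]
    for call in consume_file_mock.call_args_list
]
assert docs == ["a.pdf", "b.pdf"]
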
@@ -9,6 +9,7 @@ from collections import defaultdict
from collections import deque
from datetime import datetime
from datetime import timedelta
from http import HTTPStatus
from pathlib import Path
from time import mktime
from typing import TYPE_CHECKING
@@ -689,18 +690,49 @@ class EmailDocumentDetailSchema(EmailSerializer):
"original_mime_type": serializers.CharField(),
"media_filename": serializers.CharField(),
"has_archive_version": serializers.BooleanField(),
"original_metadata": serializers.DictField(),
"archive_checksum": serializers.CharField(),
"archive_media_filename": serializers.CharField(),
"original_metadata": serializers.ListField(
child=inline_serializer(
name="OriginalMetadataEntry",
fields={
"namespace": serializers.CharField(),
"prefix": serializers.CharField(),
"key": serializers.CharField(),
"value": serializers.CharField(),
},
),
),
"archive_checksum": serializers.CharField(
allow_null=True,
required=False,
),
"archive_media_filename": serializers.CharField(
allow_null=True,
required=False,
),
"original_filename": serializers.CharField(),
"archive_size": serializers.IntegerField(),
"archive_metadata": serializers.DictField(),
"archive_size": serializers.IntegerField(
allow_null=True,
required=False,
),
"archive_metadata": serializers.ListField(
child=inline_serializer(
name="ArchiveMetadataEntry",
fields={
"namespace": serializers.CharField(),
"prefix": serializers.CharField(),
"key": serializers.CharField(),
"value": serializers.CharField(),
},
),
allow_null=True,
required=False,
),
"lang": serializers.CharField(),
},
),
400: None,
403: None,
404: None,
HTTPStatus.BAD_REQUEST: None,
HTTPStatus.FORBIDDEN: None,
HTTPStatus.NOT_FOUND: None,
},
),
notes=extend_schema(
@@ -1770,9 +1802,9 @@ class DocumentViewSet(
if request.user is not None:
overrides.actor_id = request.user.id

async_task = consume_file.delay(
input_doc,
overrides,
async_task = consume_file.apply_async(
kwargs={"input_doc": input_doc, "overrides": overrides},
headers={"trigger_source": PaperlessTask.TriggerSource.WEB_UI},
)
logger.debug(
f"Updated document {root_doc.id} with new version",
@@ -2449,6 +2481,7 @@ class DocumentOperationPermissionMixin(PassUserMixin, DocumentSelectionMixin):
"edit_pdf",
"remove_password",
}
METHOD_NAMES_REQUIRING_TRIGGER_SOURCE = METHOD_NAMES_REQUIRING_USER

def _has_document_permissions(
self,
@@ -2539,12 +2572,19 @@ class DocumentOperationPermissionMixin(PassUserMixin, DocumentSelectionMixin):
parameters = {
k: v
for k, v in validated_data.items()
if k not in {"documents", "all", "filters"}
if k not in {"documents", "all", "filters", "from_webui"}
}
user = self.request.user
from_webui = validated_data.get("from_webui", False)

if method.__name__ in self.METHOD_NAMES_REQUIRING_USER:
parameters["user"] = user
if method.__name__ in self.METHOD_NAMES_REQUIRING_TRIGGER_SOURCE:
parameters["trigger_source"] = (
PaperlessTask.TriggerSource.WEB_UI
if from_webui
else PaperlessTask.TriggerSource.API_UPLOAD
)

if not self._has_document_permissions(
user=user,
@@ -2628,12 +2668,19 @@ class BulkEditView(DocumentOperationPermissionMixin):
user = self.request.user
method = serializer.validated_data.get("method")
parameters = serializer.validated_data.get("parameters")
from_webui = serializer.validated_data.get("from_webui", False)
documents = self._resolve_document_ids(
user=user,
validated_data=serializer.validated_data,
)
if method.__name__ in self.METHOD_NAMES_REQUIRING_USER:
parameters["user"] = user
if method.__name__ in self.METHOD_NAMES_REQUIRING_TRIGGER_SOURCE:
parameters["trigger_source"] = (
PaperlessTask.TriggerSource.WEB_UI
if from_webui
else PaperlessTask.TriggerSource.API_UPLOAD
)
if not self._has_document_permissions(
user=user,
documents=documents,
@@ -2927,9 +2974,15 @@ class PostDocumentView(GenericAPIView[Any]):
custom_fields=custom_fields,
)

async_task = consume_file.delay(
input_doc,
input_doc_overrides,
async_task = consume_file.apply_async(
kwargs={"input_doc": input_doc, "overrides": input_doc_overrides},
headers={
"trigger_source": (
PaperlessTask.TriggerSource.WEB_UI
if from_webui
else PaperlessTask.TriggerSource.API_UPLOAD
),
},
)

return Response(async_task.id)
@@ -3528,7 +3581,17 @@ class BulkDownloadView(DocumentSelectionMixin, GenericAPIView[Any]):
return response


@extend_schema_view(**generate_object_with_permissions_schema(StoragePathSerializer))
@extend_schema_view(
**generate_object_with_permissions_schema(StoragePathSerializer),
test=extend_schema(
operation_id="storage_paths_test",
description="Test a storage path template against a document.",
request=StoragePathTestSerializer,
responses={
(HTTPStatus.OK, "application/json"): OpenApiTypes.STR,
},
),
)
class StoragePathViewSet(PermissionsAwareDocumentCountMixin, ModelViewSet[StoragePath]):
model = StoragePath

@@ -3565,7 +3628,10 @@ class StoragePathViewSet(PermissionsAwareDocumentCountMixin, ModelViewSet[Storag
response = super().destroy(request, *args, **kwargs)

if doc_ids:
bulk_edit.bulk_update_documents.delay(doc_ids)
bulk_edit.bulk_update_documents.apply_async(
kwargs={"document_ids": doc_ids},
headers={"trigger_source": PaperlessTask.TriggerSource.SYSTEM},
)

return response

@@ -3985,6 +4051,19 @@ class ShareLinkViewSet(
ordering_fields = ("created", "expiration", "document")


@extend_schema_view(
rebuild=extend_schema(
operation_id="share_link_bundles_rebuild",
description="Reset and re-queue a share link bundle for processing.",
responses={
HTTPStatus.OK: ShareLinkBundleSerializer,
(HTTPStatus.BAD_REQUEST, "application/json"): inline_serializer(
name="RebuildBundleError",
fields={"detail": serializers.CharField()},
),
},
),
)
class ShareLinkBundleViewSet(PassUserMixin, ModelViewSet[ShareLinkBundle]):
model = ShareLinkBundle

@@ -4062,7 +4141,10 @@ class ShareLinkBundleViewSet(PassUserMixin, ModelViewSet[ShareLinkBundle]):
"file_path",
],
)
build_share_link_bundle.delay(bundle.pk)
build_share_link_bundle.apply_async(
kwargs={"bundle_id": bundle.pk},
headers={"trigger_source": PaperlessTask.TriggerSource.MANUAL},
)
bundle.document_total = len(ordered_documents)
response_serializer = self.get_serializer(bundle)
headers = self.get_success_headers(response_serializer.data)
@@ -4095,7 +4177,10 @@ class ShareLinkBundleViewSet(PassUserMixin, ModelViewSet[ShareLinkBundle]):
"file_path",
],
)
build_share_link_bundle.delay(bundle.pk)
build_share_link_bundle.apply_async(
kwargs={"bundle_id": bundle.pk},
headers={"trigger_source": PaperlessTask.TriggerSource.MANUAL},
)
bundle.document_total = (
getattr(bundle, "document_total", None) or bundle.documents.count()
)
@@ -4600,7 +4685,11 @@ class SystemStatusView(PassUserMixin):
classifier_error = "No classifier training tasks found"
elif last_trained_task.status != PaperlessTask.Status.SUCCESS:
classifier_status = "ERROR"
classifier_error = last_trained_task.result_message
classifier_error = (
last_trained_task.result_data.get("error_message")
if last_trained_task.result_data
else None
)
classifier_last_trained = (
last_trained_task.date_done if last_trained_task else None
)
@@ -4620,7 +4709,11 @@ class SystemStatusView(PassUserMixin):
sanity_check_error = "No sanity check tasks found"
elif last_sanity_check.status != PaperlessTask.Status.SUCCESS:
sanity_check_status = "ERROR"
sanity_check_error = last_sanity_check.result_message
sanity_check_error = (
last_sanity_check.result_data.get("error_message")
if last_sanity_check.result_data
else None
)
sanity_check_last_run = (
last_sanity_check.date_done if last_sanity_check else None
)
@@ -4645,7 +4738,11 @@ class SystemStatusView(PassUserMixin):
llmindex_error = "No LLM index update tasks found"
elif last_llmindex_update.status == PaperlessTask.Status.FAILURE:
llmindex_status = "ERROR"
llmindex_error = last_llmindex_update.result_message
llmindex_error = (
last_llmindex_update.result_data.get("error_message")
if last_llmindex_update.result_data
else None
)
llmindex_last_modified = (
last_llmindex_update.date_done if last_llmindex_update else None
)

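The WEB_UI/API_UPLOAD selection repeated across these views, reduced to a plain function for clarity (string values stand in for the PaperlessTask.TriggerSource enum):

def resolve_trigger_source(from_webui: bool) -> str:
    # Requests flagged by the frontend count as Web UI; everything else
    # reaching these endpoints is treated as an API upload.
    return "web_ui" if from_webui else "api_upload"

assert resolve_trigger_source(True) == "web_ui"
assert resolve_trigger_source(False) == "api_upload"
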
@@ -253,12 +253,14 @@ def execute_webhook_action(
document.mime_type,
),
}
send_webhook.delay(
url=action.webhook.url,
data=data,
headers=headers,
files=files,
as_json=action.webhook.as_json,
send_webhook.apply_async(
kwargs={
"url": action.webhook.url,
"data": data,
"headers": headers,
"files": files,
"as_json": action.webhook.as_json,
},
)
logger.debug(
f"Webhook to {action.webhook.url} queued",

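This change is behavior-preserving for the task itself: delay(**kw) is exactly apply_async(kwargs=kw) with no extra options, so nothing about delivery changes until headers are added. A standalone sketch with a placeholder task in eager mode:

from celery import Celery

app = Celery("demo")
app.conf.task_always_eager = True  # execute inline for this sketch

@app.task
def send_webhook(url, data, headers, files, as_json):
    return url

kw = {"url": "https://example.com", "data": "", "headers": {}, "files": None, "as_json": False}
# Both call styles produce the same result; only apply_async can take headers.
assert send_webhook.delay(**kw).get() == send_webhook.apply_async(kwargs=kw).get()
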
@@ -2,7 +2,7 @@ msgid ""
msgstr ""
"Project-Id-Version: paperless-ngx\n"
"Report-Msgid-Bugs-To: \n"
"POT-Creation-Date: 2026-04-20 16:29+0000\n"
"POT-Creation-Date: 2026-04-20 20:20+0000\n"
"PO-Revision-Date: 2022-02-17 04:17\n"
"Last-Translator: \n"
"Language-Team: English\n"
@@ -57,31 +57,31 @@ msgstr ""
msgid "Custom field not found"
msgstr ""

#: documents/models.py:39 documents/models.py:923 documents/models.py:971
#: documents/models.py:39 documents/models.py:916 documents/models.py:964
msgid "owner"
msgstr ""

#: documents/models.py:56 documents/models.py:1253
#: documents/models.py:56 documents/models.py:1246
msgid "None"
msgstr ""

#: documents/models.py:57 documents/models.py:1254
#: documents/models.py:57 documents/models.py:1247
msgid "Any word"
msgstr ""

#: documents/models.py:58 documents/models.py:1255
#: documents/models.py:58 documents/models.py:1248
msgid "All words"
msgstr ""

#: documents/models.py:59 documents/models.py:1256
#: documents/models.py:59 documents/models.py:1249
msgid "Exact match"
msgstr ""

#: documents/models.py:60 documents/models.py:1257
#: documents/models.py:60 documents/models.py:1250
msgid "Regular expression"
msgstr ""

#: documents/models.py:61 documents/models.py:1258
#: documents/models.py:61 documents/models.py:1251
msgid "Fuzzy word"
msgstr ""

@@ -89,20 +89,20 @@ msgstr ""
msgid "Automatic"
msgstr ""

#: documents/models.py:65 documents/models.py:538 documents/models.py:1836
#: documents/models.py:65 documents/models.py:538 documents/models.py:1829
#: paperless_mail/models.py:23 paperless_mail/models.py:143
msgid "name"
msgstr ""

#: documents/models.py:67 documents/models.py:1322
#: documents/models.py:67 documents/models.py:1315
msgid "match"
msgstr ""

#: documents/models.py:70 documents/models.py:1325
#: documents/models.py:70 documents/models.py:1318
msgid "matching algorithm"
msgstr ""

#: documents/models.py:75 documents/models.py:1330
#: documents/models.py:75 documents/models.py:1323
msgid "is insensitive"
msgstr ""

@@ -168,7 +168,7 @@ msgstr ""
msgid "title"
msgstr ""

#: documents/models.py:190 documents/models.py:837
#: documents/models.py:190 documents/models.py:830
msgid "content"
msgstr ""

@@ -206,8 +206,8 @@ msgstr ""
msgid "The number of pages of the document."
msgstr ""

#: documents/models.py:245 documents/models.py:843 documents/models.py:881
#: documents/models.py:943 documents/models.py:1061 documents/models.py:1120
#: documents/models.py:245 documents/models.py:836 documents/models.py:874
#: documents/models.py:936 documents/models.py:1054 documents/models.py:1113
msgid "created"
msgstr ""

@@ -271,12 +271,12 @@ msgstr ""
msgid "Optional short label for a document version."
msgstr ""

#: documents/models.py:339 documents/models.py:854 documents/models.py:908
#: documents/models.py:1879
#: documents/models.py:339 documents/models.py:847 documents/models.py:901
#: documents/models.py:1872
msgid "document"
msgstr ""

#: documents/models.py:340 documents/models.py:1014
#: documents/models.py:340 documents/models.py:1007
msgid "documents"
msgstr ""

@@ -296,11 +296,11 @@ msgstr ""
msgid "Title"
msgstr ""

#: documents/models.py:525 documents/models.py:746 documents/models.py:1274
#: documents/models.py:525 documents/models.py:746 documents/models.py:1267
msgid "Created"
msgstr ""

#: documents/models.py:526 documents/models.py:1273
#: documents/models.py:526 documents/models.py:1266
msgid "Added"
msgstr ""

@@ -584,7 +584,7 @@ msgstr ""
msgid "filter rules"
msgstr ""

#: documents/models.py:675 documents/models.py:937
#: documents/models.py:675 documents/models.py:930
msgid "Pending"
msgstr ""

@@ -620,7 +620,7 @@ msgstr ""
msgid "Index Optimize"
msgstr ""

#: documents/models.py:686 documents/models.py:1269
#: documents/models.py:686 documents/models.py:1262
msgid "Mail Fetch"
msgstr ""

@@ -652,11 +652,11 @@ msgstr ""
msgid "Bulk Delete"
msgstr ""

#: documents/models.py:702 documents/models.py:1264
#: documents/models.py:702 documents/models.py:1257
msgid "Scheduled"
msgstr ""

#: documents/models.py:703 documents/models.py:1270
#: documents/models.py:703 documents/models.py:1263
msgid "Web UI"
msgstr ""

@@ -745,623 +745,615 @@ msgid "Structured result data from task execution"
msgstr ""

#: documents/models.py:796
msgid "Result Message"
msgstr ""

#: documents/models.py:797
msgid "Human-readable result message"
msgstr ""

#: documents/models.py:803
msgid "Acknowledged"
msgstr ""

#: documents/models.py:808
#: documents/models.py:801
msgid "Task"
msgstr ""

#: documents/models.py:809
#: documents/models.py:802
msgid "Tasks"
msgstr ""

#: documents/models.py:839
#: documents/models.py:832
msgid "Note for the document"
msgstr ""

#: documents/models.py:863
#: documents/models.py:856
msgid "user"
msgstr ""

#: documents/models.py:868
#: documents/models.py:861
msgid "note"
msgstr ""

#: documents/models.py:869
#: documents/models.py:862
msgid "notes"
msgstr ""

#: documents/models.py:877
#: documents/models.py:870
msgid "Archive"
msgstr ""

#: documents/models.py:878
#: documents/models.py:871
msgid "Original"
msgstr ""

#: documents/models.py:889 documents/models.py:951 paperless_mail/models.py:75
#: documents/models.py:882 documents/models.py:944 paperless_mail/models.py:75
msgid "expiration"
msgstr ""

#: documents/models.py:896 documents/models.py:958
#: documents/models.py:889 documents/models.py:951
msgid "slug"
msgstr ""

#: documents/models.py:928
#: documents/models.py:921
msgid "share link"
msgstr ""

#: documents/models.py:929
#: documents/models.py:922
msgid "share links"
msgstr ""

#: documents/models.py:938
#: documents/models.py:931
msgid "Processing"
msgstr ""

#: documents/models.py:939
#: documents/models.py:932
msgid "Ready"
msgstr ""

#: documents/models.py:940
#: documents/models.py:933
msgid "Failed"
msgstr ""

#: documents/models.py:987
#: documents/models.py:980
msgid "size (bytes)"
msgstr ""

#: documents/models.py:993
#: documents/models.py:986
msgid "last error"
msgstr ""

#: documents/models.py:1000
#: documents/models.py:993
msgid "file path"
msgstr ""

#: documents/models.py:1006
#: documents/models.py:999
msgid "built at"
msgstr ""

#: documents/models.py:1019
#: documents/models.py:1012
msgid "share link bundle"
msgstr ""

#: documents/models.py:1020
#: documents/models.py:1013
msgid "share link bundles"
msgstr ""

#: documents/models.py:1023
#: documents/models.py:1016
#, python-format
msgid "Share link bundle %(slug)s"
msgstr ""

#: documents/models.py:1049
#: documents/models.py:1042
msgid "String"
msgstr ""

#: documents/models.py:1050
#: documents/models.py:1043
msgid "URL"
msgstr ""

#: documents/models.py:1051
#: documents/models.py:1044
msgid "Date"
msgstr ""

#: documents/models.py:1052
#: documents/models.py:1045
msgid "Boolean"
msgstr ""

#: documents/models.py:1053
#: documents/models.py:1046
msgid "Integer"
msgstr ""

#: documents/models.py:1054
#: documents/models.py:1047
msgid "Float"
msgstr ""

#: documents/models.py:1055
#: documents/models.py:1048
msgid "Monetary"
msgstr ""

#: documents/models.py:1056
#: documents/models.py:1049
msgid "Document Link"
msgstr ""

#: documents/models.py:1057
#: documents/models.py:1050
msgid "Select"
msgstr ""

#: documents/models.py:1058
#: documents/models.py:1051
msgid "Long Text"
msgstr ""

#: documents/models.py:1070
#: documents/models.py:1063
msgid "data type"
msgstr ""

#: documents/models.py:1077
#: documents/models.py:1070
msgid "extra data"
msgstr ""

#: documents/models.py:1081
#: documents/models.py:1074
msgid "Extra data for the custom field, such as select options"
msgstr ""

#: documents/models.py:1087
#: documents/models.py:1080
msgid "custom field"
msgstr ""

#: documents/models.py:1088
#: documents/models.py:1081
msgid "custom fields"
msgstr ""

#: documents/models.py:1188
#: documents/models.py:1181
msgid "custom field instance"
msgstr ""

#: documents/models.py:1189
#: documents/models.py:1182
msgid "custom field instances"
msgstr ""

#: documents/models.py:1261
#: documents/models.py:1254
msgid "Consumption Started"
msgstr ""

#: documents/models.py:1262
#: documents/models.py:1255
msgid "Document Added"
msgstr ""

#: documents/models.py:1263
#: documents/models.py:1256
msgid "Document Updated"
msgstr ""

#: documents/models.py:1267
#: documents/models.py:1260
msgid "Consume Folder"
msgstr ""

#: documents/models.py:1268
#: documents/models.py:1261
msgid "Api Upload"
msgstr ""

#: documents/models.py:1275
#: documents/models.py:1268
msgid "Modified"
msgstr ""

#: documents/models.py:1276
#: documents/models.py:1269
msgid "Custom Field"
msgstr ""

#: documents/models.py:1279
#: documents/models.py:1272
msgid "Workflow Trigger Type"
msgstr ""

#: documents/models.py:1291
#: documents/models.py:1284
msgid "filter path"
msgstr ""

#: documents/models.py:1296
#: documents/models.py:1289
msgid ""
"Only consume documents with a path that matches this if specified. Wildcards "
"specified as * are allowed. Case insensitive."
msgstr ""

#: documents/models.py:1303
#: documents/models.py:1296
msgid "filter filename"
msgstr ""

#: documents/models.py:1308 paperless_mail/models.py:200
#: documents/models.py:1301 paperless_mail/models.py:200
msgid ""
"Only consume documents which entirely match this filename if specified. "
"Wildcards such as *.pdf or *invoice* are allowed. Case insensitive."
msgstr ""

#: documents/models.py:1319
#: documents/models.py:1312
msgid "filter documents from this mail rule"
msgstr ""

#: documents/models.py:1335
#: documents/models.py:1328
msgid "has these tag(s)"
msgstr ""

#: documents/models.py:1342
#: documents/models.py:1335
msgid "has all of these tag(s)"
msgstr ""

#: documents/models.py:1349
#: documents/models.py:1342
msgid "does not have these tag(s)"
msgstr ""

#: documents/models.py:1357
#: documents/models.py:1350
msgid "has this document type"
msgstr ""

#: documents/models.py:1364
#: documents/models.py:1357
msgid "has one of these document types"
msgstr ""

#: documents/models.py:1371
#: documents/models.py:1364
msgid "does not have these document type(s)"
msgstr ""

#: documents/models.py:1379
#: documents/models.py:1372
msgid "has this correspondent"
msgstr ""

#: documents/models.py:1386
#: documents/models.py:1379
msgid "does not have these correspondent(s)"
msgstr ""

#: documents/models.py:1393
#: documents/models.py:1386
msgid "has one of these correspondents"
msgstr ""

#: documents/models.py:1401
#: documents/models.py:1394
msgid "has this storage path"
msgstr ""

#: documents/models.py:1408
#: documents/models.py:1401
msgid "has one of these storage paths"
msgstr ""

#: documents/models.py:1415
#: documents/models.py:1408
msgid "does not have these storage path(s)"
msgstr ""

#: documents/models.py:1419
#: documents/models.py:1412
msgid "filter custom field query"
msgstr ""

#: documents/models.py:1422
#: documents/models.py:1415
msgid "JSON-encoded custom field query expression."
msgstr ""

#: documents/models.py:1426
#: documents/models.py:1419
msgid "schedule offset days"
msgstr ""

#: documents/models.py:1429
#: documents/models.py:1422
msgid "The number of days to offset the schedule trigger by."
msgstr ""

#: documents/models.py:1434
#: documents/models.py:1427
msgid "schedule is recurring"
msgstr ""

#: documents/models.py:1437
#: documents/models.py:1430
msgid "If the schedule should be recurring."
msgstr ""

#: documents/models.py:1442
#: documents/models.py:1435
msgid "schedule recurring delay in days"
msgstr ""

#: documents/models.py:1446
#: documents/models.py:1439
msgid "The number of days between recurring schedule triggers."
msgstr ""

#: documents/models.py:1451
#: documents/models.py:1444
msgid "schedule date field"
msgstr ""

#: documents/models.py:1456
#: documents/models.py:1449
msgid "The field to check for a schedule trigger."
msgstr ""

#: documents/models.py:1465
#: documents/models.py:1458
msgid "schedule date custom field"
msgstr ""

#: documents/models.py:1469
#: documents/models.py:1462
msgid "workflow trigger"
msgstr ""

#: documents/models.py:1470
#: documents/models.py:1463
msgid "workflow triggers"
msgstr ""

#: documents/models.py:1478
#: documents/models.py:1471
msgid "email subject"
msgstr ""

#: documents/models.py:1482
#: documents/models.py:1475
msgid ""
"The subject of the email, can include some placeholders, see documentation."
msgstr ""

#: documents/models.py:1488
#: documents/models.py:1481
msgid "email body"
msgstr ""

#: documents/models.py:1491
#: documents/models.py:1484
msgid ""
"The body (message) of the email, can include some placeholders, see "
"documentation."
msgstr ""

#: documents/models.py:1497
#: documents/models.py:1490
msgid "emails to"
msgstr ""

#: documents/models.py:1500
#: documents/models.py:1493
msgid "The destination email addresses, comma separated."
msgstr ""

#: documents/models.py:1506
#: documents/models.py:1499
msgid "include document in email"
msgstr ""

#: documents/models.py:1517
#: documents/models.py:1510
msgid "webhook url"
msgstr ""

#: documents/models.py:1520
#: documents/models.py:1513
msgid "The destination URL for the notification."
msgstr ""

#: documents/models.py:1525
#: documents/models.py:1518
msgid "use parameters"
msgstr ""

#: documents/models.py:1530
#: documents/models.py:1523
msgid "send as JSON"
msgstr ""

#: documents/models.py:1534
#: documents/models.py:1527
msgid "webhook parameters"
msgstr ""

#: documents/models.py:1537
#: documents/models.py:1530
msgid "The parameters to send with the webhook URL if body not used."
msgstr ""

#: documents/models.py:1541
#: documents/models.py:1534
msgid "webhook body"
msgstr ""

#: documents/models.py:1544
#: documents/models.py:1537
msgid "The body to send with the webhook URL if parameters not used."
msgstr ""

#: documents/models.py:1548
#: documents/models.py:1541
msgid "webhook headers"
msgstr ""

#: documents/models.py:1551
#: documents/models.py:1544
msgid "The headers to send with the webhook URL."
msgstr ""

#: documents/models.py:1556
#: documents/models.py:1549
msgid "include document in webhook"
msgstr ""

#: documents/models.py:1567
#: documents/models.py:1560
msgid "Assignment"
msgstr ""

#: documents/models.py:1571
#: documents/models.py:1564
msgid "Removal"
msgstr ""

#: documents/models.py:1575 documents/templates/account/password_reset.html:15
#: documents/models.py:1568 documents/templates/account/password_reset.html:15
msgid "Email"
msgstr ""

#: documents/models.py:1579
#: documents/models.py:1572
msgid "Webhook"
msgstr ""

#: documents/models.py:1583
#: documents/models.py:1576
msgid "Password removal"
msgstr ""

#: documents/models.py:1587
#: documents/models.py:1580
msgid "Move to trash"
msgstr ""

#: documents/models.py:1591
#: documents/models.py:1584
msgid "Workflow Action Type"
msgstr ""

#: documents/models.py:1596 documents/models.py:1838
#: documents/models.py:1589 documents/models.py:1831
#: paperless_mail/models.py:145
msgid "order"
msgstr ""

#: documents/models.py:1599
#: documents/models.py:1592
msgid "assign title"
msgstr ""

#: documents/models.py:1603
#: documents/models.py:1596
msgid "Assign a document title, must be a Jinja2 template, see documentation."
msgstr ""

#: documents/models.py:1611 paperless_mail/models.py:274
#: documents/models.py:1604 paperless_mail/models.py:274
msgid "assign this tag"
msgstr ""

#: documents/models.py:1620 paperless_mail/models.py:282
#: documents/models.py:1613 paperless_mail/models.py:282
msgid "assign this document type"
msgstr ""

#: documents/models.py:1629 paperless_mail/models.py:296
#: documents/models.py:1622 paperless_mail/models.py:296
msgid "assign this correspondent"
msgstr ""

#: documents/models.py:1638
#: documents/models.py:1631
msgid "assign this storage path"
msgstr ""

#: documents/models.py:1647
#: documents/models.py:1640
msgid "assign this owner"
msgstr ""

#: documents/models.py:1654
#: documents/models.py:1647
msgid "grant view permissions to these users"
msgstr ""

#: documents/models.py:1661
#: documents/models.py:1654
msgid "grant view permissions to these groups"
msgstr ""

#: documents/models.py:1668
#: documents/models.py:1661
msgid "grant change permissions to these users"
msgstr ""

#: documents/models.py:1675
#: documents/models.py:1668
msgid "grant change permissions to these groups"
msgstr ""

#: documents/models.py:1682
#: documents/models.py:1675
msgid "assign these custom fields"
msgstr ""

#: documents/models.py:1686
#: documents/models.py:1679
msgid "custom field values"
msgstr ""

#: documents/models.py:1690
#: documents/models.py:1683
msgid "Optional values to assign to the custom fields."
msgstr ""

#: documents/models.py:1699
#: documents/models.py:1692
msgid "remove these tag(s)"
msgstr ""

#: documents/models.py:1704
#: documents/models.py:1697
msgid "remove all tags"
msgstr ""

#: documents/models.py:1711
#: documents/models.py:1704
msgid "remove these document type(s)"
msgstr ""

#: documents/models.py:1716
#: documents/models.py:1709
msgid "remove all document types"
msgstr ""

#: documents/models.py:1723
#: documents/models.py:1716
msgid "remove these correspondent(s)"
msgstr ""

#: documents/models.py:1728
#: documents/models.py:1721
msgid "remove all correspondents"
msgstr ""

#: documents/models.py:1735
#: documents/models.py:1728
msgid "remove these storage path(s)"
msgstr ""

#: documents/models.py:1740
#: documents/models.py:1733
msgid "remove all storage paths"
msgstr ""

#: documents/models.py:1747
#: documents/models.py:1740
msgid "remove these owner(s)"
msgstr ""

#: documents/models.py:1752
#: documents/models.py:1745
msgid "remove all owners"
msgstr ""

#: documents/models.py:1759
#: documents/models.py:1752
msgid "remove view permissions for these users"
msgstr ""

#: documents/models.py:1766
#: documents/models.py:1759
msgid "remove view permissions for these groups"
msgstr ""

#: documents/models.py:1773
#: documents/models.py:1766
msgid "remove change permissions for these users"
msgstr ""

#: documents/models.py:1780
#: documents/models.py:1773
msgid "remove change permissions for these groups"
msgstr ""

#: documents/models.py:1785
#: documents/models.py:1778
msgid "remove all permissions"
msgstr ""

#: documents/models.py:1792
#: documents/models.py:1785
msgid "remove these custom fields"
msgstr ""

#: documents/models.py:1797
#: documents/models.py:1790
msgid "remove all custom fields"
msgstr ""

#: documents/models.py:1806
#: documents/models.py:1799
msgid "email"
msgstr ""

#: documents/models.py:1815
#: documents/models.py:1808
msgid "webhook"
msgstr ""

#: documents/models.py:1819
#: documents/models.py:1812
msgid "passwords"
msgstr ""

#: documents/models.py:1823
#: documents/models.py:1816
msgid ""
"Passwords to try when removing PDF protection. Separate with commas or new "
"lines."
msgstr ""

#: documents/models.py:1828
#: documents/models.py:1821
msgid "workflow action"
msgstr ""

#: documents/models.py:1829
#: documents/models.py:1822
msgid "workflow actions"
msgstr ""

#: documents/models.py:1844
#: documents/models.py:1837
msgid "triggers"
msgstr ""

#: documents/models.py:1851
#: documents/models.py:1844
msgid "actions"
msgstr ""

#: documents/models.py:1854 paperless_mail/models.py:154
#: documents/models.py:1847 paperless_mail/models.py:154
msgid "enabled"
msgstr ""

#: documents/models.py:1865
#: documents/models.py:1858
msgid "workflow"
msgstr ""

#: documents/models.py:1869
#: documents/models.py:1862
msgid "workflow trigger type"
msgstr ""

#: documents/models.py:1883
#: documents/models.py:1876
msgid "date run"
msgstr ""

#: documents/models.py:1889
#: documents/models.py:1882
msgid "workflow run"
msgstr ""

#: documents/models.py:1890
#: documents/models.py:1883
msgid "workflow runs"
msgstr ""

#: documents/serialisers.py:463 documents/serialisers.py:815
#: documents/serialisers.py:2664 documents/views.py:2223
#: documents/views.py:2292 paperless_mail/serialisers.py:143
#: documents/serialisers.py:2681 documents/views.py:2255
#: documents/views.py:2324 paperless_mail/serialisers.py:143
msgid "Insufficient permissions."
msgstr ""

@@ -1369,39 +1361,39 @@ msgstr ""
msgid "Invalid color."
msgstr ""

#: documents/serialisers.py:2170
#: documents/serialisers.py:2175
#, python-format
msgid "File type %(type)s not supported"
msgstr ""

#: documents/serialisers.py:2214
#: documents/serialisers.py:2219
#, python-format
msgid "Custom field id must be an integer: %(id)s"
msgstr ""

#: documents/serialisers.py:2221
#: documents/serialisers.py:2226
#, python-format
msgid "Custom field with id %(id)s does not exist"
msgstr ""

#: documents/serialisers.py:2238 documents/serialisers.py:2248
#: documents/serialisers.py:2243 documents/serialisers.py:2253
msgid ""
"Custom fields must be a list of integers or an object mapping ids to values."
msgstr ""

#: documents/serialisers.py:2243
#: documents/serialisers.py:2248
msgid "Some custom fields don't exist or were specified twice."
msgstr ""

#: documents/serialisers.py:2390
#: documents/serialisers.py:2395
msgid "Invalid variable detected."
msgstr ""

#: documents/serialisers.py:2720
#: documents/serialisers.py:2737
msgid "Duplicate document identifiers are not allowed."
msgstr ""

#: documents/serialisers.py:2750 documents/views.py:4025
#: documents/serialisers.py:2767 documents/views.py:4104
#, python-format
msgid "Documents not found: %(ids)s"
msgstr ""
@@ -1669,28 +1661,28 @@ msgstr ""
msgid "Unable to parse URI {value}"
msgstr ""

#: documents/views.py:2103
#: documents/views.py:2135
msgid "Specify only one of text, title_search, query, or more_like_id."
msgstr ""

#: documents/views.py:2216 documents/views.py:2289
#: documents/views.py:2248 documents/views.py:2321
msgid "Invalid more_like_id"
msgstr ""

#: documents/views.py:4037
#: documents/views.py:4116
#, python-format
msgid "Insufficient permissions to share document %(id)s."
msgstr ""

#: documents/views.py:4080
#: documents/views.py:4162
msgid "Bundle is already being processed."
msgstr ""

#: documents/views.py:4137
#: documents/views.py:4222
msgid "The share link bundle is still being prepared. Please try again later."
msgstr ""

#: documents/views.py:4147
#: documents/views.py:4232
msgid "The share link bundle is unavailable."
msgstr ""

@@ -37,6 +37,7 @@ from documents.data_models import DocumentMetadataOverrides
from documents.data_models import DocumentSource
from documents.loggers import LoggingMixin
from documents.models import Correspondent
from documents.models import PaperlessTask
from documents.parsers import is_mime_type_supported
from documents.tasks import consume_file
from paperless.network import is_public_ip
@@ -238,7 +239,7 @@ def mailbox_login(mailbox: MailBox, account: MailAccount) -> None:

@shared_task
def apply_mail_action(
    result: list[str],
    result: list,
    rule_id: int,
    message_uid: str,
    message_subject: str,
@@ -893,8 +894,12 @@ class MailAccountHandler(LoggingMixin):
                )

                consume_task = consume_file.s(
                    input_doc,
                    doc_overrides,
                    input_doc=input_doc,
                    overrides=doc_overrides,
                ).set(
                    headers={
                        "trigger_source": PaperlessTask.TriggerSource.EMAIL_CONSUME,
                    },
                )

                consume_tasks.append(consume_task)
@@ -991,9 +996,9 @@ class MailAccountHandler(LoggingMixin):
            )

            consume_task = consume_file.s(
                input_doc,
                doc_overrides,
            )
                input_doc=input_doc,
                overrides=doc_overrides,
            ).set(headers={"trigger_source": PaperlessTask.TriggerSource.EMAIL_CONSUME})

            queue_consumption_tasks(
                consume_tasks=[consume_task],
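Note: the two hunks above change how the consume task is enqueued: arguments move from positional to keyword form, and the trigger source travels as a Celery message header attached via Signature.set() instead of as a task argument. A minimal sketch of the pattern, assuming Celery's default task protocol v2 (where custom message headers surface on the worker-side request context); the handler body is illustrative, not paperless-ngx code:

from celery import shared_task

@shared_task(bind=True)
def consume_file(self, input_doc=None, overrides=None):
    # Headers attached at enqueue time surface on self.request;
    # getattr with a default keeps direct (non-queued) calls working.
    trigger_source = getattr(self.request, "trigger_source", None)
    ...

# Enqueue with keyword arguments plus a header, mirroring the diff:
sig = consume_file.s(input_doc=None, overrides=None).set(
    headers={"trigger_source": "email"},
)
result = sig.apply_async()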
@@ -4,6 +4,7 @@ import pytest

from paperless_mail.mail import MailAccountHandler
from paperless_mail.models import MailAccount
from paperless_mail.tests.factories import MailAccountFactory


@pytest.fixture()
@@ -11,14 +12,13 @@ def greenmail_mail_account(db: None) -> Generator[MailAccount, None, None]:
    """
    Create a mail account configured for local Greenmail server.
    """
    account = MailAccount.objects.create(
    account = MailAccountFactory(
        name="Greenmail Test",
        imap_server="localhost",
        imap_port=3143,
        imap_security=MailAccount.ImapSecurity.NONE,
        username="test@localhost",
        password="test",
        character_set="UTF-8",
    )
    yield account
    account.delete()
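Note: the fixture yields the persisted account and removes it again after the test via the code following `yield`. A minimal consumer, assuming pytest-django provides the database (the test name is hypothetical):

def test_greenmail_account_is_persisted(greenmail_mail_account: MailAccount) -> None:
    # The fixture has already created the row; cleanup happens automatically.
    assert MailAccount.objects.filter(pk=greenmail_mail_account.pk).exists()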
57 src/paperless_mail/tests/factories.py Normal file
@@ -0,0 +1,57 @@
from __future__ import annotations

import factory
from django.utils import timezone
from factory.django import DjangoModelFactory

from paperless_mail.models import MailAccount
from paperless_mail.models import MailRule
from paperless_mail.models import ProcessedMail


class MailAccountFactory(DjangoModelFactory):
    class Meta:
        model = MailAccount

    name = factory.Sequence(lambda n: f"Mail Account {n}")
    imap_server = "imap.example.com"
    imap_port = 993
    imap_security = MailAccount.ImapSecurity.SSL
    username = factory.Sequence(lambda n: f"user{n}@example.com")
    password = "password"
    character_set = "UTF-8"
    account_type = MailAccount.MailAccountType.IMAP
    is_token = False


class MailRuleFactory(DjangoModelFactory):
    class Meta:
        model = MailRule

    name = factory.Sequence(lambda n: f"Mail Rule {n}")
    account = factory.SubFactory(MailAccountFactory)
    enabled = True
    folder = "INBOX"
    order = 0
    maximum_age = 30
    attachment_type = MailRule.AttachmentProcessing.ATTACHMENTS_ONLY
    consumption_scope = MailRule.ConsumptionScope.ATTACHMENTS_ONLY
    pdf_layout = MailRule.PdfLayout.DEFAULT
    action = MailRule.MailAction.MARK_READ
    assign_title_from = MailRule.TitleSource.FROM_SUBJECT
    assign_correspondent_from = MailRule.CorrespondentSource.FROM_NOTHING
    assign_owner_from_rule = True
    stop_processing = False


class ProcessedMailFactory(DjangoModelFactory):
    class Meta:
        model = ProcessedMail

    rule = factory.SubFactory(MailRuleFactory)
    folder = "INBOX"
    uid = factory.Sequence(lambda n: str(n))
    subject = factory.Faker("sentence", nb_words=4)
    received = factory.LazyFunction(timezone.now)
    processed = factory.LazyFunction(timezone.now)
    status = "SUCCESS"
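Note: the new factories centralize model defaults so the tests below can shrink to one-liners: factory.Sequence generates a unique value per instance, SubFactory creates the related row on demand, and Faker/LazyFunction defer value generation to build time. A short usage sketch (DjangoModelFactory persists through the ORM, so a test database is assumed):

from paperless_mail.tests.factories import (
    MailAccountFactory,
    MailRuleFactory,
    ProcessedMailFactory,
)

rule = MailRuleFactory()  # also creates the related MailAccount via SubFactory
account = MailAccountFactory(name="Email1", imap_port=443)  # kwargs override defaults
pm = ProcessedMailFactory(rule=rule, status="FAILED", error="e")  # share one rule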
@@ -3,18 +3,20 @@ from unittest import mock

from django.contrib.auth.models import Permission
from django.contrib.auth.models import User
from django.utils import timezone
from guardian.shortcuts import assign_perm
from rest_framework import status
from rest_framework.test import APITestCase

from documents.models import Correspondent
from documents.models import DocumentType
from documents.models import Tag
from documents.tests.factories import CorrespondentFactory
from documents.tests.factories import DocumentTypeFactory
from documents.tests.factories import TagFactory
from documents.tests.utils import DirectoriesMixin
from paperless_mail.models import MailAccount
from paperless_mail.models import MailRule
from paperless_mail.models import ProcessedMail
from paperless_mail.tests.factories import MailAccountFactory
from paperless_mail.tests.factories import MailRuleFactory
from paperless_mail.tests.factories import ProcessedMailFactory
from paperless_mail.tests.test_mail import BogusMailBox


@@ -46,14 +48,12 @@ class TestAPIMailAccounts(DirectoriesMixin, APITestCase):
        - Configured mail accounts are provided
        """

        account1 = MailAccount.objects.create(
        account1 = MailAccountFactory(
            name="Email1",
            username="username1",
            password="password1",
            imap_server="server.example.com",
            imap_port=443,
            imap_security=MailAccount.ImapSecurity.SSL,
            character_set="UTF-8",
        )

        response = self.client.get(self.ENDPOINT)
@@ -118,15 +118,7 @@ class TestAPIMailAccounts(DirectoriesMixin, APITestCase):
        - Account is deleted
        """

        account1 = MailAccount.objects.create(
            name="Email1",
            username="username1",
            password="password1",
            imap_server="server.example.com",
            imap_port=443,
            imap_security=MailAccount.ImapSecurity.SSL,
            character_set="UTF-8",
        )
        account1 = MailAccountFactory()

        response = self.client.delete(
            f"{self.ENDPOINT}{account1.pk}/",
@@ -146,15 +138,7 @@ class TestAPIMailAccounts(DirectoriesMixin, APITestCase):
        - The mail account is updated, password only updated if not '****'
        """

        account1 = MailAccount.objects.create(
            name="Email1",
            username="username1",
            password="password1",
            imap_server="server.example.com",
            imap_port=443,
            imap_security=MailAccount.ImapSecurity.SSL,
            character_set="UTF-8",
        )
        account1 = MailAccountFactory()

        response = self.client.patch(
            f"{self.ENDPOINT}{account1.pk}/",
@@ -245,14 +229,11 @@ class TestAPIMailAccounts(DirectoriesMixin, APITestCase):
        THEN:
        - API returns success
        """
        account = MailAccount.objects.create(
            name="Email1",
        account = MailAccountFactory(
            username="admin",
            password="secret",
            imap_server="server.example.com",
            imap_port=443,
            imap_security=MailAccount.ImapSecurity.SSL,
            character_set="UTF-8",
        )

        response = self.client.post(
@@ -302,51 +283,10 @@ class TestAPIMailAccounts(DirectoriesMixin, APITestCase):

        user2 = User.objects.create_user(username="temp_admin2")

        account1 = MailAccount.objects.create(
            name="Email1",
            username="username1",
            password="password1",
            imap_server="server.example.com",
            imap_port=443,
            imap_security=MailAccount.ImapSecurity.SSL,
            character_set="UTF-8",
        )

        account2 = MailAccount.objects.create(
            name="Email2",
            username="username2",
            password="password2",
            imap_server="server.example.com",
            imap_port=443,
            imap_security=MailAccount.ImapSecurity.SSL,
            character_set="UTF-8",
        )
        account2.owner = self.user
        account2.save()

        account3 = MailAccount.objects.create(
            name="Email3",
            username="username3",
            password="password3",
            imap_server="server.example.com",
            imap_port=443,
            imap_security=MailAccount.ImapSecurity.SSL,
            character_set="UTF-8",
        )
        account3.owner = user2
        account3.save()

        account4 = MailAccount.objects.create(
            name="Email4",
            username="username4",
            password="password4",
            imap_server="server.example.com",
            imap_port=443,
            imap_security=MailAccount.ImapSecurity.SSL,
            character_set="UTF-8",
        )
        account4.owner = user2
        account4.save()
        account1 = MailAccountFactory(name="Email1")
        account2 = MailAccountFactory(name="Email2", owner=self.user)
        _account3 = MailAccountFactory(name="Email3", owner=user2)
        account4 = MailAccountFactory(name="Email4", owner=user2)
        assign_perm("view_mailaccount", self.user, account4)

        response = self.client.get(self.ENDPOINT)
@@ -379,31 +319,15 @@ class TestAPIMailRules(DirectoriesMixin, APITestCase):
        - Configured mail rules are provided
        """

        account1 = MailAccount.objects.create(
            name="Email1",
            username="username1",
            password="password1",
            imap_server="server.example.com",
            imap_port=443,
            imap_security=MailAccount.ImapSecurity.SSL,
            character_set="UTF-8",
        )

        rule1 = MailRule.objects.create(
        account1 = MailAccountFactory()
        rule1 = MailRuleFactory(
            name="Rule1",
            account=account1,
            folder="INBOX",
            filter_from="from@example.com",
            filter_to="someone@somewhere.com",
            filter_subject="subject",
            filter_body="body",
            filter_attachment_filename_include="file.pdf",
            maximum_age=30,
            action=MailRule.MailAction.MARK_READ,
            assign_title_from=MailRule.TitleSource.FROM_SUBJECT,
            assign_correspondent_from=MailRule.CorrespondentSource.FROM_NOTHING,
            order=0,
            attachment_type=MailRule.AttachmentProcessing.ATTACHMENTS_ONLY,
        )

        response = self.client.get(self.ENDPOINT)
@@ -443,27 +367,10 @@ class TestAPIMailRules(DirectoriesMixin, APITestCase):
        - A new mail rule is created
        """

        account1 = MailAccount.objects.create(
            name="Email1",
            username="username1",
            password="password1",
            imap_server="server.example.com",
            imap_port=443,
            imap_security=MailAccount.ImapSecurity.SSL,
            character_set="UTF-8",
        )

        tag = Tag.objects.create(
            name="t",
        )

        correspondent = Correspondent.objects.create(
            name="c",
        )

        document_type = DocumentType.objects.create(
            name="dt",
        )
        account1 = MailAccountFactory()
        tag = TagFactory(name="t")
        correspondent = CorrespondentFactory(name="c")
        document_type = DocumentTypeFactory(name="dt")

        rule1 = {
            "name": "Rule1",
@@ -548,31 +455,8 @@ class TestAPIMailRules(DirectoriesMixin, APITestCase):
        - Rule is deleted
        """

        account1 = MailAccount.objects.create(
            name="Email1",
            username="username1",
            password="password1",
            imap_server="server.example.com",
            imap_port=443,
            imap_security=MailAccount.ImapSecurity.SSL,
            character_set="UTF-8",
        )

        rule1 = MailRule.objects.create(
            name="Rule1",
            account=account1,
            folder="INBOX",
            filter_from="from@example.com",
            filter_subject="subject",
            filter_body="body",
            filter_attachment_filename_include="file.pdf",
            maximum_age=30,
            action=MailRule.MailAction.MARK_READ,
            assign_title_from=MailRule.TitleSource.FROM_SUBJECT,
            assign_correspondent_from=MailRule.CorrespondentSource.FROM_NOTHING,
            order=0,
            attachment_type=MailRule.AttachmentProcessing.ATTACHMENTS_ONLY,
        )
        account1 = MailAccountFactory()
        rule1 = MailRuleFactory(account=account1)

        response = self.client.delete(
            f"{self.ENDPOINT}{rule1.pk}/",
@@ -592,31 +476,8 @@ class TestAPIMailRules(DirectoriesMixin, APITestCase):
        - The mail rule is updated
        """

        account1 = MailAccount.objects.create(
            name="Email1",
            username="username1",
            password="password1",
            imap_server="server.example.com",
            imap_port=443,
            imap_security=MailAccount.ImapSecurity.SSL,
            character_set="UTF-8",
        )

        rule1 = MailRule.objects.create(
            name="Rule1",
            account=account1,
            folder="INBOX",
            filter_from="from@example.com",
            filter_subject="subject",
            filter_body="body",
            filter_attachment_filename_include="file.pdf",
            maximum_age=30,
            action=MailRule.MailAction.MARK_READ,
            assign_title_from=MailRule.TitleSource.FROM_SUBJECT,
            assign_correspondent_from=MailRule.CorrespondentSource.FROM_NOTHING,
            order=0,
            attachment_type=MailRule.AttachmentProcessing.ATTACHMENTS_ONLY,
        )
        account1 = MailAccountFactory()
        rule1 = MailRuleFactory(account=account1)

        response = self.client.patch(
            f"{self.ENDPOINT}{rule1.pk}/",
@@ -634,16 +495,7 @@ class TestAPIMailRules(DirectoriesMixin, APITestCase):

    def test_create_mail_rule_forbidden_for_unpermitted_account(self) -> None:
        other_user = User.objects.create_user(username="mail-owner")
        foreign_account = MailAccount.objects.create(
            name="ForeignEmail",
            username="username1",
            password="password1",
            imap_server="server.example.com",
            imap_port=443,
            imap_security=MailAccount.ImapSecurity.SSL,
            character_set="UTF-8",
            owner=other_user,
        )
        foreign_account = MailAccountFactory(name="ForeignEmail", owner=other_user)

        response = self.client.post(
            self.ENDPOINT,
@@ -668,16 +520,7 @@ class TestAPIMailRules(DirectoriesMixin, APITestCase):
        self,
    ) -> None:
        other_user = User.objects.create_user(username="mail-owner")
        foreign_account = MailAccount.objects.create(
            name="ForeignEmail",
            username="username1",
            password="password1",
            imap_server="server.example.com",
            imap_port=443,
            imap_security=MailAccount.ImapSecurity.SSL,
            character_set="UTF-8",
            owner=other_user,
        )
        foreign_account = MailAccountFactory(name="ForeignEmail", owner=other_user)
        assign_perm("change_mailaccount", self.user, foreign_account)

        response = self.client.post(
@@ -700,38 +543,10 @@ class TestAPIMailRules(DirectoriesMixin, APITestCase):
        self.assertEqual(MailRule.objects.get().account, foreign_account)

    def test_update_mail_rule_forbidden_for_unpermitted_account(self) -> None:
        own_account = MailAccount.objects.create(
            name="Email1",
            username="username1",
            password="password1",
            imap_server="server.example.com",
            imap_port=443,
            imap_security=MailAccount.ImapSecurity.SSL,
            character_set="UTF-8",
        )
        own_account = MailAccountFactory()
        other_user = User.objects.create_user(username="mail-owner")
        foreign_account = MailAccount.objects.create(
            name="ForeignEmail",
            username="username2",
            password="password2",
            imap_server="server.example.com",
            imap_port=443,
            imap_security=MailAccount.ImapSecurity.SSL,
            character_set="UTF-8",
            owner=other_user,
        )
        rule1 = MailRule.objects.create(
            name="Rule1",
            account=own_account,
            folder="INBOX",
            filter_from="from@example.com",
            maximum_age=30,
            action=MailRule.MailAction.MARK_READ,
            assign_title_from=MailRule.TitleSource.FROM_SUBJECT,
            assign_correspondent_from=MailRule.CorrespondentSource.FROM_NOTHING,
            order=0,
            attachment_type=MailRule.AttachmentProcessing.ATTACHMENTS_ONLY,
        )
        foreign_account = MailAccountFactory(owner=other_user)
        rule1 = MailRuleFactory(account=own_account)

        response = self.client.patch(
            f"{self.ENDPOINT}{rule1.pk}/",
@@ -753,54 +568,11 @@ class TestAPIMailRules(DirectoriesMixin, APITestCase):
        """

        user2 = User.objects.create_user(username="temp_admin2")

        account1 = MailAccount.objects.create(
            name="Email1",
            username="username1",
            password="password1",
            imap_server="server.example.com",
            imap_port=443,
            imap_security=MailAccount.ImapSecurity.SSL,
            character_set="UTF-8",
        )

        rule1 = MailRule.objects.create(
            name="Rule1",
            account=account1,
            folder="INBOX",
            filter_from="from@example1.com",
            order=0,
        )

        rule2 = MailRule.objects.create(
            name="Rule2",
            account=account1,
            folder="INBOX",
            filter_from="from@example2.com",
            order=1,
        )
        rule2.owner = self.user
        rule2.save()

        rule3 = MailRule.objects.create(
            name="Rule3",
            account=account1,
            folder="INBOX",
            filter_from="from@example3.com",
            order=2,
        )
        rule3.owner = user2
        rule3.save()

        rule4 = MailRule.objects.create(
            name="Rule4",
            account=account1,
            folder="INBOX",
            filter_from="from@example4.com",
            order=3,
        )
        rule4.owner = user2
        rule4.save()
        account1 = MailAccountFactory()
        rule1 = MailRuleFactory(account=account1, order=0)
        rule2 = MailRuleFactory(account=account1, order=1, owner=self.user)
        MailRuleFactory(account=account1, order=2, owner=user2)
        rule4 = MailRuleFactory(account=account1, order=3, owner=user2)
        assign_perm("view_mailrule", self.user, rule4)

        response = self.client.get(self.ENDPOINT)
@@ -820,15 +592,7 @@ class TestAPIMailRules(DirectoriesMixin, APITestCase):
        THEN:
        - The API should reject the request
        """
        account = MailAccount.objects.create(
            name="Email1",
            username="username1",
            password="password1",
            imap_server="server.example.com",
            imap_port=443,
            imap_security=MailAccount.ImapSecurity.SSL,
            character_set="UTF-8",
        )
        account = MailAccountFactory()

        rule_data = {
            "name": "Rule1",
@@ -874,72 +638,16 @@ class TestAPIProcessedMails(DirectoriesMixin, APITestCase):
        - Only unowned, owned by user or granted processed mails are provided
        """
        user2 = User.objects.create_user(username="temp_admin2")

        account = MailAccount.objects.create(
            name="Email1",
            username="username1",
            password="password1",
            imap_server="server.example.com",
            imap_port=443,
            imap_security=MailAccount.ImapSecurity.SSL,
            character_set="UTF-8",
        )

        rule = MailRule.objects.create(
            name="Rule1",
            account=account,
            folder="INBOX",
            filter_from="from@example.com",
            order=0,
        )

        pm1 = ProcessedMail.objects.create(
        rule = MailRuleFactory()
        pm1 = ProcessedMailFactory(rule=rule)
        pm2 = ProcessedMailFactory(
            rule=rule,
            folder="INBOX",
            uid="1",
            subject="Subj1",
            received=timezone.now(),
            processed=timezone.now(),
            status="SUCCESS",
            error=None,
        )

        pm2 = ProcessedMail.objects.create(
            rule=rule,
            folder="INBOX",
            uid="2",
            subject="Subj2",
            received=timezone.now(),
            processed=timezone.now(),
            status="FAILED",
            error="err",
            owner=self.user,
        )

        ProcessedMail.objects.create(
            rule=rule,
            folder="INBOX",
            uid="3",
            subject="Subj3",
            received=timezone.now(),
            processed=timezone.now(),
            status="SUCCESS",
            error=None,
            owner=user2,
        )

        pm4 = ProcessedMail.objects.create(
            rule=rule,
            folder="INBOX",
            uid="4",
            subject="Subj4",
            received=timezone.now(),
            processed=timezone.now(),
            status="SUCCESS",
            error=None,
        )
        pm4.owner = user2
        pm4.save()
        ProcessedMailFactory(rule=rule, owner=user2)
        pm4 = ProcessedMailFactory(rule=rule, owner=user2)
        assign_perm("view_processedmail", self.user, pm4)

        response = self.client.get(self.ENDPOINT)
@@ -958,62 +666,12 @@ class TestAPIProcessedMails(DirectoriesMixin, APITestCase):
        THEN:
        - Only processed mails for that rule are returned
        """
        account = MailAccount.objects.create(
            name="Email1",
            username="username1",
            password="password1",
            imap_server="server.example.com",
            imap_port=443,
            imap_security=MailAccount.ImapSecurity.SSL,
            character_set="UTF-8",
        )

        rule1 = MailRule.objects.create(
            name="Rule1",
            account=account,
            folder="INBOX",
            filter_from="from1@example.com",
            order=0,
        )
        rule2 = MailRule.objects.create(
            name="Rule2",
            account=account,
            folder="INBOX",
            filter_from="from2@example.com",
            order=1,
        )

        pm1 = ProcessedMail.objects.create(
            rule=rule1,
            folder="INBOX",
            uid="r1-1",
            subject="R1-A",
            received=timezone.now(),
            processed=timezone.now(),
            status="SUCCESS",
            error=None,
            owner=self.user,
        )
        pm2 = ProcessedMail.objects.create(
            rule=rule1,
            folder="INBOX",
            uid="r1-2",
            subject="R1-B",
            received=timezone.now(),
            processed=timezone.now(),
            status="FAILED",
            error="e",
        )
        ProcessedMail.objects.create(
            rule=rule2,
            folder="INBOX",
            uid="r2-1",
            subject="R2-A",
            received=timezone.now(),
            processed=timezone.now(),
            status="SUCCESS",
            error=None,
        )
        account = MailAccountFactory()
        rule1 = MailRuleFactory(account=account)
        rule2 = MailRuleFactory(account=account)
        pm1 = ProcessedMailFactory(rule=rule1, owner=self.user)
        pm2 = ProcessedMailFactory(rule=rule1, status="FAILED", error="e")
        ProcessedMailFactory(rule=rule2)

        response = self.client.get(f"{self.ENDPOINT}?rule={rule1.pk}")

@@ -1031,70 +689,18 @@ class TestAPIProcessedMails(DirectoriesMixin, APITestCase):
        - Only the specified processed mails are deleted, respecting ownership and permissions
        """
        user2 = User.objects.create_user(username="temp_admin2")

        account = MailAccount.objects.create(
            name="Email1",
            username="username1",
            password="password1",
            imap_server="server.example.com",
            imap_port=443,
            imap_security=MailAccount.ImapSecurity.SSL,
            character_set="UTF-8",
        )

        rule = MailRule.objects.create(
            name="Rule1",
            account=account,
            folder="INBOX",
            filter_from="from@example.com",
            order=0,
        )

        # unowned and owned by self, and one with explicit object perm
        pm_unowned = ProcessedMail.objects.create(
        rule = MailRuleFactory()
        # unowned, owned by self, and one with explicit object perm
        pm_unowned = ProcessedMailFactory(rule=rule)
        pm_owned = ProcessedMailFactory(
            rule=rule,
            folder="INBOX",
            uid="u1",
            subject="Unowned",
            received=timezone.now(),
            processed=timezone.now(),
            status="SUCCESS",
            error=None,
        )
        pm_owned = ProcessedMail.objects.create(
            rule=rule,
            folder="INBOX",
            uid="u2",
            subject="Owned",
            received=timezone.now(),
            processed=timezone.now(),
            status="FAILED",
            error="e",
            owner=self.user,
        )
        pm_granted = ProcessedMail.objects.create(
            rule=rule,
            folder="INBOX",
            uid="u3",
            subject="Granted",
            received=timezone.now(),
            processed=timezone.now(),
            status="SUCCESS",
            error=None,
            owner=user2,
        )
        pm_granted = ProcessedMailFactory(rule=rule, owner=user2)
        assign_perm("delete_processedmail", self.user, pm_granted)
        pm_forbidden = ProcessedMail.objects.create(
            rule=rule,
            folder="INBOX",
            uid="u4",
            subject="Forbidden",
            received=timezone.now(),
            processed=timezone.now(),
            status="SUCCESS",
            error=None,
            owner=user2,
        )
        pm_forbidden = ProcessedMailFactory(rule=rule, owner=user2)

        # Success for allowed items
        response = self.client.post(

@@ -28,6 +28,7 @@ from rest_framework.test import APITestCase

from documents.models import Correspondent
from documents.models import MatchingModel
from documents.tests.factories import CorrespondentFactory
from documents.tests.utils import DirectoriesMixin
from documents.tests.utils import FileSystemAssertsMixin
from paperless_mail import tasks
@@ -38,6 +39,8 @@ from paperless_mail.mail import apply_mail_action
from paperless_mail.models import MailAccount
from paperless_mail.models import MailRule
from paperless_mail.models import ProcessedMail
from paperless_mail.tests.factories import MailAccountFactory
from paperless_mail.tests.factories import MailRuleFactory


@dataclasses.dataclass
@@ -359,7 +362,8 @@ class MailMocker(DirectoriesMixin, FileSystemAssertsMixin, TestCase):
            consume_tasks,
            expected_signatures,
        ):
            input_doc, overrides = consume_task.args
            input_doc = consume_task.kwargs["input_doc"]
            overrides = consume_task.kwargs["overrides"]

            # assert the file exists
            self.assertIsFile(input_doc.original_file)
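Note: because the signatures are now built with keyword arguments (see the mail.py hunks above), the mocker reads from .kwargs instead of unpacking .args. A small sketch of the property being relied on; a Celery signature records how it was constructed without executing anything:

from documents.tasks import consume_file

sig = consume_file.s(input_doc=None, overrides=None)
assert sig.args == ()             # nothing positional any more
assert "input_doc" in sig.kwargs  # keyword args are introspectable pre-execution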
@@ -432,8 +436,8 @@ class TestMail(
            "fake@localhost.com",
        )

        me_localhost = Correspondent.objects.create(name=message2.from_)
        someone_else = Correspondent.objects.create(name="someone else")
        me_localhost = CorrespondentFactory(name=message2.from_)
        someone_else = CorrespondentFactory(name="someone else")

        handler = MailAccountHandler()

@@ -1574,21 +1578,8 @@ class TestMail(

class TestPostConsumeAction(TestCase):
    def setUp(self) -> None:
        self.account = MailAccount.objects.create(
            name="test",
            imap_server="imap.test.com",
            imap_port=993,
            imap_security=MailAccount.ImapSecurity.SSL,
            username="testuser",
            password="password",
        )
        self.rule = MailRule.objects.create(
            name="testrule",
            account=self.account,
            action=MailRule.MailAction.MARK_READ,
            action_parameter="",
            folder="INBOX",
        )
        self.account = MailAccountFactory()
        self.rule = MailRuleFactory(account=self.account)
        self.message_uid = "12345"
        self.message_subject = "Test Subject"
        self.message_date = timezone.make_aware(timezone.datetime(2023, 1, 1, 12, 0, 0))
@@ -2022,7 +2013,7 @@ class TestMailAccountProcess(APITestCase):
        )
        self.url = f"/api/mail_accounts/{self.account.pk}/process/"

    @mock.patch("paperless_mail.tasks.process_mail_accounts.delay")
    @mock.patch("paperless_mail.tasks.process_mail_accounts.apply_async")
    def test_mail_account_process_view(self, m) -> None:
        response = self.client.post(self.url)
        self.assertEqual(response.status_code, status.HTTP_200_OK)
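Note: the patch target changes here because the view now queues via apply_async rather than delay (see the views.py hunk further below). A hedged sketch of extending such a test to also assert the header, assuming the view passes headers= as a keyword argument; the test name is hypothetical:

    @mock.patch("paperless_mail.tasks.process_mail_accounts.apply_async")
    def test_process_sends_manual_trigger(self, m) -> None:
        self.client.post(self.url)
        m.assert_called_once()
        # call_args.kwargs holds keyword arguments passed to apply_async
        self.assertEqual(
            m.call_args.kwargs["headers"],
            {"trigger_source": PaperlessTask.TriggerSource.MANUAL},
        )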
@@ -2036,15 +2027,7 @@ class TestMailRuleAPI(APITestCase):
            password="testpassword",
        )
        self.client.force_authenticate(user=self.user)
        self.account = MailAccount.objects.create(
            imap_server="imap.example.com",
            imap_port=993,
            imap_security=MailAccount.ImapSecurity.SSL,
            username="admin",
            password="secret",
            account_type=MailAccount.MailAccountType.IMAP,
            owner=self.user,
        )
        self.account = MailAccountFactory(owner=self.user)
        self.url = "/api/mail_rules/"

    def test_create_mail_rule(self) -> None:

@@ -13,6 +13,7 @@ from rest_framework import status
from paperless_mail.mail import MailAccountHandler
from paperless_mail.models import MailAccount
from paperless_mail.oauth import PaperlessMailOAuth2Manager
from paperless_mail.tests.factories import MailAccountFactory


@override_settings(
@@ -289,11 +290,9 @@ class TestMailOAuth(
        mock_mailbox = mock.MagicMock()
        mock_get_mailbox.return_value.__enter__.return_value = mock_mailbox

        mail_account = MailAccount.objects.create(
        mail_account = MailAccountFactory(
            name="Test Gmail Mail Account",
            username="test_username",
            imap_security=MailAccount.ImapSecurity.SSL,
            imap_port=993,
            account_type=MailAccount.MailAccountType.GMAIL_OAUTH,
            is_token=True,
            refresh_token="test_refresh_token",
@@ -315,11 +314,9 @@ class TestMailOAuth(
            "refresh_token": "test_refresh",
            "expires_in": 3600,
        }
        outlook_mail_account = MailAccount.objects.create(
        outlook_mail_account = MailAccountFactory(
            name="Test Outlook Mail Account",
            username="test_username",
            imap_security=MailAccount.ImapSecurity.SSL,
            imap_port=993,
            account_type=MailAccount.MailAccountType.OUTLOOK_OAUTH,
            is_token=True,
            refresh_token="test_refresh_token",
@@ -352,11 +349,9 @@ class TestMailOAuth(
        mock_mailbox = mock.MagicMock()
        mock_get_mailbox.return_value.__enter__.return_value = mock_mailbox

        mail_account = MailAccount.objects.create(
        mail_account = MailAccountFactory(
            name="Test Gmail Mail Account",
            username="test_username",
            imap_security=MailAccount.ImapSecurity.SSL,
            imap_port=993,
            account_type=MailAccount.MailAccountType.GMAIL_OAUTH,
            is_token=True,
            refresh_token="test_refresh_token",

@@ -13,9 +13,9 @@ from django.test import override_settings
from imap_tools import MailMessage

from paperless_mail.mail import MailAccountHandler
from paperless_mail.models import MailAccount
from paperless_mail.models import MailRule
from paperless_mail.preprocessor import MailMessageDecryptor
from paperless_mail.tests.factories import MailAccountFactory
from paperless_mail.tests.test_mail import TestMail
from paperless_mail.tests.test_mail import _AttachmentDef

@@ -251,7 +251,7 @@ class TestMailMessageGpgDecryptor(TestMail):

        encrypted_message = self.messageEncryptor.encrypt(message)

        account = MailAccount.objects.create()
        account = MailAccountFactory()
        rule = MailRule(
            assign_title_from=MailRule.TitleSource.FROM_FILENAME,
            consumption_scope=MailRule.ConsumptionScope.EVERYTHING,

@@ -1,6 +1,7 @@
import datetime
import logging
from datetime import timedelta
from http import HTTPStatus
from typing import Any

from django.http import HttpResponseBadRequest
@@ -23,6 +24,7 @@ from rest_framework.viewsets import ModelViewSet
from rest_framework.viewsets import ReadOnlyModelViewSet

from documents.filters import ObjectOwnedOrGrantedPermissionsFilter
from documents.models import PaperlessTask
from documents.permissions import PaperlessObjectPermissions
from documents.permissions import has_perms_owner_aware
from documents.views import PassUserMixin
@@ -155,11 +157,39 @@ class MailAccountViewSet(PassUserMixin, ModelViewSet[MailAccount]):
    @action(methods=["post"], detail=True)
    def process(self, request, pk=None):
        account = self.get_object()
        process_mail_accounts.delay([account.pk])
        process_mail_accounts.apply_async(
            kwargs={"account_ids": [account.pk]},
            headers={"trigger_source": PaperlessTask.TriggerSource.MANUAL},
        )

        return Response({"result": "OK"})


@extend_schema_view(
    bulk_delete=extend_schema(
        operation_id="processed_mail_bulk_delete",
        description="Delete multiple processed mail records by ID.",
        request=inline_serializer(
            name="BulkDeleteMailRequest",
            fields={
                "mail_ids": serializers.ListField(child=serializers.IntegerField()),
            },
        ),
        responses={
            (HTTPStatus.OK, "application/json"): inline_serializer(
                name="BulkDeleteMailResponse",
                fields={
                    "result": serializers.CharField(),
                    "deleted_mail_ids": serializers.ListField(
                        child=serializers.IntegerField(),
                    ),
                },
            ),
            HTTPStatus.BAD_REQUEST: None,
            HTTPStatus.FORBIDDEN: None,
        },
    ),
)
class ProcessedMailViewSet(PassUserMixin, ReadOnlyModelViewSet[ProcessedMail]):
    permission_classes = (IsAuthenticated, PaperlessObjectPermissions)
    serializer_class = ProcessedMailSerializer

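Note: the schema above documents the bulk-delete contract (a list of integer IDs in, the deleted IDs back out). A hedged sketch of exercising it from a test; the exact route is not shown in this diff, so the URL below is an assumption based on the viewset's apparent registration:

response = self.client.post(
    "/api/mail/processed_mail/bulk_delete/",  # assumed route, not confirmed here
    data={"mail_ids": [pm_owned.pk, pm_granted.pk]},
    format="json",
)
# Documented response shape: {"result": "OK", "deleted_mail_ids": [...]}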