Compare commits

...

18 Commits

Author SHA1 Message Date
shamoon
f7118f61a0 Prevent duplicate mail processing across rules 2026-02-24 11:44:00 -08:00
GitHub Actions
e08287f791 Auto translate strings 2026-02-24 00:44:37 +00:00
Jan Kleine
c4ea332c61 Feature: move to trash action for workflows (#11176)
Co-authored-by: shamoon <4887959+shamoon@users.noreply.github.com>
2026-02-23 16:42:50 -08:00
shamoon
fa13ca7a42 Fix: pass api_base to OpenAIEmbedding (#12151) 2026-02-23 13:47:32 -08:00
Trenton H
814f57b099 Allows the typing job to error and still pass, so we get results, but not failures for now (#12147) 2026-02-23 09:44:35 -08:00
GitHub Actions
be7f1c6233 Auto translate strings 2026-02-22 23:18:50 +00:00
shamoon
d6cd6d0311 Tweakhancement: reset to page 1 on reset filters (#12143) 2026-02-22 15:17:02 -08:00
Daniel Herrmann
095ea3cbd3 Documentation: clarify behaviour around document splitting (#12137)
Co-Authored-By: shamoon <4887959+shamoon@users.noreply.github.com>
2026-02-22 08:26:38 -08:00
shamoon
5b667621cd Try not to piss off mypy 2026-02-21 17:48:11 -08:00
shamoon
1b912c137a Merge branch 'main' into dev 2026-02-21 17:45:28 -08:00
github-actions[bot]
98298e37cd Changelog v2.20.8 - GHA (#12135)
Co-authored-by: github-actions <41898282+github-actions[bot]@users.noreply.github.com>
2026-02-21 17:43:19 -08:00
shamoon
35be0850ec Bump version to 2.20.8 2026-02-21 16:49:52 -08:00
shamoon
1bb4b9b473 More permissions on mail account test endpoint 2026-02-21 16:47:55 -08:00
shamoon
f85094dc2b Set owner on OAuth mail credentials 2026-02-21 16:37:32 -08:00
shamoon
65ca78e9e7 Security: fix/GHSA-7qqc-wrcw-2fj9 2026-02-21 16:34:33 -08:00
GitHub Actions
57c5939d7b Auto translate strings 2026-02-20 20:55:01 +00:00
shamoon
43fe932c57 Fix: unify POSTs when toggling sidebar to prevent db lock (#12129) 2026-02-20 12:53:16 -08:00
shamoon
83f68d6063 Fix mailrule_stop_processing migration 2026-02-20 10:19:16 -08:00
37 changed files with 1227 additions and 167 deletions

View File

@@ -129,6 +129,7 @@ jobs:
run: |
uv pip list
- name: Check typing (pyrefly)
continue-on-error: true
run: |
uv run pyrefly \
check \
@@ -143,6 +144,7 @@ jobs:
${{ runner.os }}-mypy-py${{ env.DEFAULT_PYTHON }}-
${{ runner.os }}-mypy-
- name: Check typing (mypy)
continue-on-error: true
run: |
uv run mypy \
--show-error-codes \

View File

@@ -700,15 +700,11 @@ src/documents/signals/handlers.py:0: error: Function is missing a type annotatio
src/documents/signals/handlers.py:0: error: Function is missing a type annotation for one or more arguments [no-untyped-def]
src/documents/signals/handlers.py:0: error: Function is missing a type annotation for one or more arguments [no-untyped-def]
src/documents/signals/handlers.py:0: error: Function is missing a type annotation for one or more arguments [no-untyped-def]
src/documents/signals/handlers.py:0: error: Function is missing a type annotation for one or more arguments [no-untyped-def]
src/documents/signals/handlers.py:0: error: Incompatible return value type (got "tuple[DocumentMetadataOverrides | None, str]", expected "tuple[DocumentMetadataOverrides, str] | None") [return-value]
src/documents/signals/handlers.py:0: error: Incompatible types in assignment (expression has type "list[Tag]", variable has type "set[Tag]") [assignment]
src/documents/signals/handlers.py:0: error: Incompatible types in assignment (expression has type "tuple[Any, Any, Any]", variable has type "tuple[Any, Any]") [assignment]
src/documents/signals/handlers.py:0: error: Item "ConsumableDocument" of "Document | ConsumableDocument" has no attribute "refresh_from_db" [union-attr]
src/documents/signals/handlers.py:0: error: Item "ConsumableDocument" of "Document | ConsumableDocument" has no attribute "save" [union-attr]
src/documents/signals/handlers.py:0: error: Item "ConsumableDocument" of "Document | ConsumableDocument" has no attribute "source_path" [union-attr]
src/documents/signals/handlers.py:0: error: Item "ConsumableDocument" of "Document | ConsumableDocument" has no attribute "tags" [union-attr]
src/documents/signals/handlers.py:0: error: Item "ConsumableDocument" of "Document | ConsumableDocument" has no attribute "tags" [union-attr]
src/documents/signals/handlers.py:0: error: Item "ConsumableDocument" of "Document | ConsumableDocument" has no attribute "title" [union-attr]
src/documents/signals/handlers.py:0: error: Item "None" of "Any | None" has no attribute "get" [union-attr]
src/documents/signals/handlers.py:0: error: Item "None" of "Any | None" has no attribute "get" [union-attr]
@@ -2192,34 +2188,34 @@ src/paperless_mail/tests/test_mail.py:0: error: "MailMessage" has no attribute "
src/paperless_mail/tests/test_mail.py:0: error: "MailMessage" has no attribute "flagged" [attr-defined]
src/paperless_mail/tests/test_mail.py:0: error: "MailMessage" has no attribute "seen" [attr-defined]
src/paperless_mail/tests/test_mail.py:0: error: "MailMessage" has no attribute "seen" [attr-defined]
src/paperless_mail/tests/test_mail.py:0: error: "type[att@480]" has no attribute "filename" [attr-defined]
src/paperless_mail/tests/test_mail.py:0: error: "type[message2@426]" has no attribute "from_" [attr-defined]
src/paperless_mail/tests/test_mail.py:0: error: "type[message2@426]" has no attribute "from_" [attr-defined]
src/paperless_mail/tests/test_mail.py:0: error: "type[message2@426]" has no attribute "from_values" [attr-defined]
src/paperless_mail/tests/test_mail.py:0: error: "type[message@419]" has no attribute "from_" [attr-defined]
src/paperless_mail/tests/test_mail.py:0: error: "type[message@419]" has no attribute "from_values" [attr-defined]
src/paperless_mail/tests/test_mail.py:0: error: "type[message@478]" has no attribute "subject" [attr-defined]
src/paperless_mail/tests/test_mail.py:0: error: "type[message@531]" has no attribute "attachments" [attr-defined]
src/paperless_mail/tests/test_mail.py:0: error: "type[att@481]" has no attribute "filename" [attr-defined]
src/paperless_mail/tests/test_mail.py:0: error: "type[message2@427]" has no attribute "from_" [attr-defined]
src/paperless_mail/tests/test_mail.py:0: error: "type[message2@427]" has no attribute "from_" [attr-defined]
src/paperless_mail/tests/test_mail.py:0: error: "type[message2@427]" has no attribute "from_values" [attr-defined]
src/paperless_mail/tests/test_mail.py:0: error: "type[message@420]" has no attribute "from_" [attr-defined]
src/paperless_mail/tests/test_mail.py:0: error: "type[message@420]" has no attribute "from_values" [attr-defined]
src/paperless_mail/tests/test_mail.py:0: error: "type[message@479]" has no attribute "subject" [attr-defined]
src/paperless_mail/tests/test_mail.py:0: error: "type[message@532]" has no attribute "attachments" [attr-defined]
src/paperless_mail/tests/test_mail.py:0: error: Argument 1 to "MailboxFolderSelectError" has incompatible type "None"; expected "tuple[Any, ...]" [arg-type]
src/paperless_mail/tests/test_mail.py:0: error: Argument 1 to "MailboxFolderSelectError" has incompatible type "None"; expected "tuple[Any, ...]" [arg-type]
src/paperless_mail/tests/test_mail.py:0: error: Argument 1 to "MailboxLoginError" has incompatible type "str"; expected "tuple[Any, ...]" [arg-type]
src/paperless_mail/tests/test_mail.py:0: error: Argument 1 to "MailboxLoginError" has incompatible type "str"; expected "tuple[Any, ...]" [arg-type]
src/paperless_mail/tests/test_mail.py:0: error: Argument 1 to "MailboxLoginError" has incompatible type "str"; expected "tuple[Any, ...]" [arg-type]
src/paperless_mail/tests/test_mail.py:0: error: Argument 1 to "MailboxLoginError" has incompatible type "str"; expected "tuple[Any, ...]" [arg-type]
src/paperless_mail/tests/test_mail.py:0: error: Argument 1 to "_get_correspondent" of "MailAccountHandler" has incompatible type "type[message2@426]"; expected "MailMessage" [arg-type]
src/paperless_mail/tests/test_mail.py:0: error: Argument 1 to "_get_correspondent" of "MailAccountHandler" has incompatible type "type[message2@426]"; expected "MailMessage" [arg-type]
src/paperless_mail/tests/test_mail.py:0: error: Argument 1 to "_get_correspondent" of "MailAccountHandler" has incompatible type "type[message@419]"; expected "MailMessage" [arg-type]
src/paperless_mail/tests/test_mail.py:0: error: Argument 1 to "_get_correspondent" of "MailAccountHandler" has incompatible type "type[message@419]"; expected "MailMessage" [arg-type]
src/paperless_mail/tests/test_mail.py:0: error: Argument 1 to "_get_correspondent" of "MailAccountHandler" has incompatible type "type[message@419]"; expected "MailMessage" [arg-type]
src/paperless_mail/tests/test_mail.py:0: error: Argument 1 to "_get_correspondent" of "MailAccountHandler" has incompatible type "type[message@419]"; expected "MailMessage" [arg-type]
src/paperless_mail/tests/test_mail.py:0: error: Argument 1 to "_get_title" of "MailAccountHandler" has incompatible type "type[message@478]"; expected "MailMessage" [arg-type]
src/paperless_mail/tests/test_mail.py:0: error: Argument 1 to "_get_title" of "MailAccountHandler" has incompatible type "type[message@478]"; expected "MailMessage" [arg-type]
src/paperless_mail/tests/test_mail.py:0: error: Argument 1 to "_get_title" of "MailAccountHandler" has incompatible type "type[message@478]"; expected "MailMessage" [arg-type]
src/paperless_mail/tests/test_mail.py:0: error: Argument 1 to "_get_correspondent" of "MailAccountHandler" has incompatible type "type[message2@427]"; expected "MailMessage" [arg-type]
src/paperless_mail/tests/test_mail.py:0: error: Argument 1 to "_get_correspondent" of "MailAccountHandler" has incompatible type "type[message2@427]"; expected "MailMessage" [arg-type]
src/paperless_mail/tests/test_mail.py:0: error: Argument 1 to "_get_correspondent" of "MailAccountHandler" has incompatible type "type[message@420]"; expected "MailMessage" [arg-type]
src/paperless_mail/tests/test_mail.py:0: error: Argument 1 to "_get_correspondent" of "MailAccountHandler" has incompatible type "type[message@420]"; expected "MailMessage" [arg-type]
src/paperless_mail/tests/test_mail.py:0: error: Argument 1 to "_get_correspondent" of "MailAccountHandler" has incompatible type "type[message@420]"; expected "MailMessage" [arg-type]
src/paperless_mail/tests/test_mail.py:0: error: Argument 1 to "_get_correspondent" of "MailAccountHandler" has incompatible type "type[message@420]"; expected "MailMessage" [arg-type]
src/paperless_mail/tests/test_mail.py:0: error: Argument 1 to "_get_title" of "MailAccountHandler" has incompatible type "type[message@479]"; expected "MailMessage" [arg-type]
src/paperless_mail/tests/test_mail.py:0: error: Argument 1 to "_get_title" of "MailAccountHandler" has incompatible type "type[message@479]"; expected "MailMessage" [arg-type]
src/paperless_mail/tests/test_mail.py:0: error: Argument 1 to "_get_title" of "MailAccountHandler" has incompatible type "type[message@479]"; expected "MailMessage" [arg-type]
src/paperless_mail/tests/test_mail.py:0: error: Argument 1 to "filter" has incompatible type "Callable[[Any], bool]"; expected "Callable[[MailMessage], TypeGuard[Never]]" [arg-type]
src/paperless_mail/tests/test_mail.py:0: error: Argument 1 to "filter" has incompatible type "Callable[[Any], bool]"; expected "Callable[[MailMessage], TypeGuard[Never]]" [arg-type]
src/paperless_mail/tests/test_mail.py:0: error: Argument 2 to "_get_title" of "MailAccountHandler" has incompatible type "type[att@480]"; expected "MailAttachment" [arg-type]
src/paperless_mail/tests/test_mail.py:0: error: Argument 2 to "_get_title" of "MailAccountHandler" has incompatible type "type[att@480]"; expected "MailAttachment" [arg-type]
src/paperless_mail/tests/test_mail.py:0: error: Argument 2 to "_get_title" of "MailAccountHandler" has incompatible type "type[att@480]"; expected "MailAttachment" [arg-type]
src/paperless_mail/tests/test_mail.py:0: error: Argument 2 to "_get_title" of "MailAccountHandler" has incompatible type "type[att@481]"; expected "MailAttachment" [arg-type]
src/paperless_mail/tests/test_mail.py:0: error: Argument 2 to "_get_title" of "MailAccountHandler" has incompatible type "type[att@481]"; expected "MailAttachment" [arg-type]
src/paperless_mail/tests/test_mail.py:0: error: Argument 2 to "_get_title" of "MailAccountHandler" has incompatible type "type[att@481]"; expected "MailAttachment" [arg-type]
src/paperless_mail/tests/test_mail.py:0: error: Argument 2 to "assertIn" of "TestCase" has incompatible type "str | None"; expected "Iterable[Any] | Container[Any]" [arg-type]
src/paperless_mail/tests/test_mail.py:0: error: Dict entry 0 has incompatible type "str": "None"; expected "str": "str" [dict-item]
src/paperless_mail/tests/test_mail.py:0: error: Dict entry 0 has incompatible type "str": "int"; expected "str": "str" [dict-item]

View File

@@ -784,9 +784,17 @@ below.
### Document Splitting {#document-splitting}
When enabled, Paperless will look for a barcode with the configured value and create a new document
starting from the next page. The page with the barcode on it will _not_ be retained. It
is expected to be a page that exists only to trigger the split.
If document splitting is enabled, Paperless splits _after_ a separator barcode by default.
This means:
- any page containing the configured separator barcode causes a new document to begin with the **next** page
- pages containing the separator barcode are discarded
This is intended for dedicated separator sheets such as PATCH-T pages.
If [`PAPERLESS_CONSUMER_BARCODE_RETAIN_SPLIT_PAGES`](configuration.md#PAPERLESS_CONSUMER_BARCODE_RETAIN_SPLIT_PAGES)
is enabled, the page containing the separator barcode is retained instead. In this mode,
each page containing the separator barcode becomes the **first** page of a new document.
### Archive Serial Number Assignment
@@ -795,8 +803,9 @@ archive serial number, allowing quick reference back to the original, paper docu
If document splitting via barcode is also enabled, documents will be split when an ASN
barcode is located. However, differing from the splitting, the page with the
barcode _will_ be retained. This allows application of a barcode to any page, including
one which holds data to keep in the document.
barcode _will_ be retained. Each detected ASN barcode begins a new document _starting with
that page_. This allows placing ASN barcodes on content pages that should remain part of
the document.
### Tag Assignment

View File

@@ -1,5 +1,7 @@
# Changelog
## paperless-ngx 2.20.8
## paperless-ngx 2.20.7
### Bug Fixes

View File

@@ -564,6 +564,18 @@ For security reasons, webhooks can be limited to specific ports and disallowed f
[configuration settings](configuration.md#workflow-webhooks) to change this behavior. If you are allowing non-admins to create workflows,
you may want to adjust these settings to prevent abuse.
##### Move to Trash {#workflow-action-move-to-trash}
"Move to Trash" actions move the document to the trash. The document can be restored
from the trash until the trash is emptied (after the configured delay or manually).
The "Move to Trash" action is always executed at the end of the workflow run,
regardless of its position in the action list. Once a "Move to Trash" action has been
executed, no further workflows will run on the document.
If a "Move to Trash" action is executed in a consume pipeline, the consumption
will be aborted and the file will be deleted.
#### Workflow placeholders
Titles and webhook payloads can be generated by workflows using [Jinja templates](https://jinja.palletsprojects.com/en/3.1.x/templates/).

View File

@@ -1,6 +1,6 @@
[project]
name = "paperless-ngx"
version = "2.20.7"
version = "2.20.8"
description = "A community-supported supercharged document management system: scan, index and archive all your physical documents"
readme = "README.md"
requires-python = ">=3.10"

View File

@@ -1781,11 +1781,15 @@
</context-group>
<context-group purpose="location">
<context context-type="sourcefile">src/app/components/app-frame/app-frame.component.ts</context>
<context context-type="linenumber">216</context>
<context context-type="linenumber">156</context>
</context-group>
<context-group purpose="location">
<context context-type="sourcefile">src/app/components/app-frame/app-frame.component.ts</context>
<context context-type="linenumber">241</context>
<context context-type="linenumber">230</context>
</context-group>
<context-group purpose="location">
<context context-type="sourcefile">src/app/components/app-frame/app-frame.component.ts</context>
<context context-type="linenumber">255</context>
</context-group>
</trans-unit>
<trans-unit id="2991443309752293110" datatype="html">
@@ -3113,21 +3117,21 @@
<source>Sidebar views updated</source>
<context-group purpose="location">
<context context-type="sourcefile">src/app/components/app-frame/app-frame.component.ts</context>
<context context-type="linenumber">329</context>
<context context-type="linenumber">343</context>
</context-group>
</trans-unit>
<trans-unit id="3547923076537026828" datatype="html">
<source>Error updating sidebar views</source>
<context-group purpose="location">
<context context-type="sourcefile">src/app/components/app-frame/app-frame.component.ts</context>
<context context-type="linenumber">332</context>
<context context-type="linenumber">346</context>
</context-group>
</trans-unit>
<trans-unit id="2526035785704676448" datatype="html">
<source>An error occurred while saving update checking settings.</source>
<context-group purpose="location">
<context context-type="sourcefile">src/app/components/app-frame/app-frame.component.ts</context>
<context context-type="linenumber">353</context>
<context context-type="linenumber">367</context>
</context-group>
</trans-unit>
<trans-unit id="4580988005648117665" datatype="html">
@@ -5351,6 +5355,13 @@
<context context-type="linenumber">445</context>
</context-group>
</trans-unit>
<trans-unit id="7902569198692046993" datatype="html">
<source>The document will be moved to the trash at the end of the workflow run.</source>
<context-group purpose="location">
<context context-type="sourcefile">src/app/components/common/edit-dialog/workflow-edit-dialog/workflow-edit-dialog.component.html</context>
<context context-type="linenumber">454</context>
</context-group>
</trans-unit>
<trans-unit id="4626030417479279989" datatype="html">
<source>Consume Folder</source>
<context-group purpose="location">
@@ -5453,109 +5464,124 @@
<context context-type="linenumber">144</context>
</context-group>
</trans-unit>
<trans-unit id="2048798344356757326" datatype="html">
<source>Move to trash</source>
<context-group purpose="location">
<context context-type="sourcefile">src/app/components/common/edit-dialog/workflow-edit-dialog/workflow-edit-dialog.component.ts</context>
<context context-type="linenumber">148</context>
</context-group>
<context-group purpose="location">
<context context-type="sourcefile">src/app/components/document-detail/document-detail.component.ts</context>
<context context-type="linenumber">1087</context>
</context-group>
<context-group purpose="location">
<context context-type="sourcefile">src/app/components/document-list/bulk-editor/bulk-editor.component.ts</context>
<context context-type="linenumber">760</context>
</context-group>
</trans-unit>
<trans-unit id="4522609911791833187" datatype="html">
<source>Has any of these tags</source>
<context-group purpose="location">
<context context-type="sourcefile">src/app/components/common/edit-dialog/workflow-edit-dialog/workflow-edit-dialog.component.ts</context>
<context context-type="linenumber">213</context>
<context context-type="linenumber">217</context>
</context-group>
</trans-unit>
<trans-unit id="4166903555074156852" datatype="html">
<source>Has all of these tags</source>
<context-group purpose="location">
<context context-type="sourcefile">src/app/components/common/edit-dialog/workflow-edit-dialog/workflow-edit-dialog.component.ts</context>
<context context-type="linenumber">220</context>
<context context-type="linenumber">224</context>
</context-group>
</trans-unit>
<trans-unit id="6624363795312783141" datatype="html">
<source>Does not have these tags</source>
<context-group purpose="location">
<context context-type="sourcefile">src/app/components/common/edit-dialog/workflow-edit-dialog/workflow-edit-dialog.component.ts</context>
<context context-type="linenumber">227</context>
<context context-type="linenumber">231</context>
</context-group>
</trans-unit>
<trans-unit id="7168528512669831184" datatype="html">
<source>Has any of these correspondents</source>
<context-group purpose="location">
<context context-type="sourcefile">src/app/components/common/edit-dialog/workflow-edit-dialog/workflow-edit-dialog.component.ts</context>
<context context-type="linenumber">234</context>
<context context-type="linenumber">238</context>
</context-group>
</trans-unit>
<trans-unit id="5281365940563983618" datatype="html">
<source>Has correspondent</source>
<context-group purpose="location">
<context context-type="sourcefile">src/app/components/common/edit-dialog/workflow-edit-dialog/workflow-edit-dialog.component.ts</context>
<context context-type="linenumber">242</context>
<context context-type="linenumber">246</context>
</context-group>
</trans-unit>
<trans-unit id="6884498632428600393" datatype="html">
<source>Does not have correspondents</source>
<context-group purpose="location">
<context context-type="sourcefile">src/app/components/common/edit-dialog/workflow-edit-dialog/workflow-edit-dialog.component.ts</context>
<context context-type="linenumber">250</context>
<context context-type="linenumber">254</context>
</context-group>
</trans-unit>
<trans-unit id="4806713133917046341" datatype="html">
<source>Has document type</source>
<context-group purpose="location">
<context context-type="sourcefile">src/app/components/common/edit-dialog/workflow-edit-dialog/workflow-edit-dialog.component.ts</context>
<context context-type="linenumber">258</context>
<context context-type="linenumber">262</context>
</context-group>
</trans-unit>
<trans-unit id="8801397520369995032" datatype="html">
<source>Has any of these document types</source>
<context-group purpose="location">
<context context-type="sourcefile">src/app/components/common/edit-dialog/workflow-edit-dialog/workflow-edit-dialog.component.ts</context>
<context context-type="linenumber">266</context>
<context context-type="linenumber">270</context>
</context-group>
</trans-unit>
<trans-unit id="1507843981661822403" datatype="html">
<source>Does not have document types</source>
<context-group purpose="location">
<context context-type="sourcefile">src/app/components/common/edit-dialog/workflow-edit-dialog/workflow-edit-dialog.component.ts</context>
<context context-type="linenumber">274</context>
<context context-type="linenumber">278</context>
</context-group>
</trans-unit>
<trans-unit id="4277260190522078330" datatype="html">
<source>Has storage path</source>
<context-group purpose="location">
<context context-type="sourcefile">src/app/components/common/edit-dialog/workflow-edit-dialog/workflow-edit-dialog.component.ts</context>
<context context-type="linenumber">282</context>
<context context-type="linenumber">286</context>
</context-group>
</trans-unit>
<trans-unit id="8858580062214623097" datatype="html">
<source>Has any of these storage paths</source>
<context-group purpose="location">
<context context-type="sourcefile">src/app/components/common/edit-dialog/workflow-edit-dialog/workflow-edit-dialog.component.ts</context>
<context context-type="linenumber">290</context>
<context context-type="linenumber">294</context>
</context-group>
</trans-unit>
<trans-unit id="6070943364927280151" datatype="html">
<source>Does not have storage paths</source>
<context-group purpose="location">
<context context-type="sourcefile">src/app/components/common/edit-dialog/workflow-edit-dialog/workflow-edit-dialog.component.ts</context>
<context context-type="linenumber">298</context>
<context context-type="linenumber">302</context>
</context-group>
</trans-unit>
<trans-unit id="6250799006816371860" datatype="html">
<source>Matches custom field query</source>
<context-group purpose="location">
<context context-type="sourcefile">src/app/components/common/edit-dialog/workflow-edit-dialog/workflow-edit-dialog.component.ts</context>
<context context-type="linenumber">306</context>
<context context-type="linenumber">310</context>
</context-group>
</trans-unit>
<trans-unit id="3138206142174978019" datatype="html">
<source>Create new workflow</source>
<context-group purpose="location">
<context context-type="sourcefile">src/app/components/common/edit-dialog/workflow-edit-dialog/workflow-edit-dialog.component.ts</context>
<context context-type="linenumber">535</context>
<context context-type="linenumber">539</context>
</context-group>
</trans-unit>
<trans-unit id="5996779210524133604" datatype="html">
<source>Edit workflow</source>
<context-group purpose="location">
<context context-type="sourcefile">src/app/components/common/edit-dialog/workflow-edit-dialog/workflow-edit-dialog.component.ts</context>
<context context-type="linenumber">539</context>
<context context-type="linenumber">543</context>
</context-group>
</trans-unit>
<trans-unit id="5457837313196342910" datatype="html">
@@ -7769,17 +7795,6 @@
<context context-type="linenumber">758</context>
</context-group>
</trans-unit>
<trans-unit id="2048798344356757326" datatype="html">
<source>Move to trash</source>
<context-group purpose="location">
<context context-type="sourcefile">src/app/components/document-detail/document-detail.component.ts</context>
<context context-type="linenumber">1087</context>
</context-group>
<context-group purpose="location">
<context context-type="sourcefile">src/app/components/document-list/bulk-editor/bulk-editor.component.ts</context>
<context context-type="linenumber">760</context>
</context-group>
</trans-unit>
<trans-unit id="7295637485862454066" datatype="html">
<source>Error deleting document</source>
<context-group purpose="location">
@@ -8486,7 +8501,7 @@
</context-group>
<context-group purpose="location">
<context context-type="sourcefile">src/app/components/document-list/document-list.component.ts</context>
<context context-type="linenumber">315</context>
<context context-type="linenumber">323</context>
</context-group>
<context-group purpose="location">
<context context-type="sourcefile">src/app/components/manage/document-attributes/document-attributes.component.html</context>
@@ -8501,7 +8516,7 @@
</context-group>
<context-group purpose="location">
<context context-type="sourcefile">src/app/components/document-list/document-list.component.ts</context>
<context context-type="linenumber">308</context>
<context context-type="linenumber">316</context>
</context-group>
<context-group purpose="location">
<context context-type="sourcefile">src/app/components/manage/document-attributes/document-attributes.component.html</context>
@@ -8767,49 +8782,49 @@
<source>Reset filters / selection</source>
<context-group purpose="location">
<context context-type="sourcefile">src/app/components/document-list/document-list.component.ts</context>
<context context-type="linenumber">296</context>
<context context-type="linenumber">304</context>
</context-group>
</trans-unit>
<trans-unit id="4135055128446167640" datatype="html">
<source>Open first [selected] document</source>
<context-group purpose="location">
<context context-type="sourcefile">src/app/components/document-list/document-list.component.ts</context>
<context context-type="linenumber">324</context>
<context context-type="linenumber">332</context>
</context-group>
</trans-unit>
<trans-unit id="3629960544875360046" datatype="html">
<source>Previous page</source>
<context-group purpose="location">
<context context-type="sourcefile">src/app/components/document-list/document-list.component.ts</context>
<context context-type="linenumber">340</context>
<context context-type="linenumber">348</context>
</context-group>
</trans-unit>
<trans-unit id="3337301694210287595" datatype="html">
<source>Next page</source>
<context-group purpose="location">
<context context-type="sourcefile">src/app/components/document-list/document-list.component.ts</context>
<context context-type="linenumber">352</context>
<context context-type="linenumber">360</context>
</context-group>
</trans-unit>
<trans-unit id="2155249406916744630" datatype="html">
<source>View &quot;<x id="PH" equiv-text="this.list.activeSavedViewTitle"/>&quot; saved successfully.</source>
<context-group purpose="location">
<context context-type="sourcefile">src/app/components/document-list/document-list.component.ts</context>
<context context-type="linenumber">385</context>
<context context-type="linenumber">393</context>
</context-group>
</trans-unit>
<trans-unit id="4646273665293421938" datatype="html">
<source>Failed to save view &quot;<x id="PH" equiv-text="this.list.activeSavedViewTitle"/>&quot;.</source>
<context-group purpose="location">
<context context-type="sourcefile">src/app/components/document-list/document-list.component.ts</context>
<context context-type="linenumber">391</context>
<context context-type="linenumber">399</context>
</context-group>
</trans-unit>
<trans-unit id="6837554170707123455" datatype="html">
<source>View &quot;<x id="PH" equiv-text="savedView.name"/>&quot; created successfully.</source>
<context-group purpose="location">
<context context-type="sourcefile">src/app/components/document-list/document-list.component.ts</context>
<context context-type="linenumber">437</context>
<context context-type="linenumber">445</context>
</context-group>
</trans-unit>
<trans-unit id="739880801667335279" datatype="html">

View File

@@ -1,6 +1,6 @@
{
"name": "paperless-ngx-ui",
"version": "2.20.7",
"version": "2.20.8",
"scripts": {
"preinstall": "npx only-allow pnpm",
"ng": "ng",

View File

@@ -243,9 +243,19 @@ describe('AppFrameComponent', () => {
it('should support toggling slim sidebar and saving', fakeAsync(() => {
const saveSettingSpy = jest.spyOn(settingsService, 'set')
settingsService.set(SETTINGS_KEYS.ATTRIBUTES_SECTIONS_COLLAPSED, [])
expect(component.slimSidebarEnabled).toBeFalsy()
expect(component.slimSidebarAnimating).toBeFalsy()
component.toggleSlimSidebar()
const requests = httpTestingController.match(
`${environment.apiBaseUrl}ui_settings/`
)
expect(requests).toHaveLength(1)
expect(requests[0].request.body.settings.slim_sidebar).toBe(true)
expect(
requests[0].request.body.settings.attributes_sections_collapsed
).toEqual(['attributes'])
requests[0].flush({ success: true })
expect(component.slimSidebarAnimating).toBeTruthy()
tick(200)
expect(component.slimSidebarAnimating).toBeFalsy()
@@ -254,6 +264,10 @@ describe('AppFrameComponent', () => {
SETTINGS_KEYS.SLIM_SIDEBAR,
true
)
expect(saveSettingSpy).toHaveBeenCalledWith(
SETTINGS_KEYS.ATTRIBUTES_SECTIONS_COLLAPSED,
['attributes']
)
}))
it('should show error on toggle slim sidebar if store settings fails', () => {

View File

@@ -140,10 +140,24 @@ export class AppFrameComponent
toggleSlimSidebar(): void {
this.slimSidebarAnimating = true
this.slimSidebarEnabled = !this.slimSidebarEnabled
if (this.slimSidebarEnabled) {
this.attributesSectionsCollapsed = true
const slimSidebarEnabled = !this.slimSidebarEnabled
this.settingsService.set(SETTINGS_KEYS.SLIM_SIDEBAR, slimSidebarEnabled)
if (slimSidebarEnabled) {
this.settingsService.set(SETTINGS_KEYS.ATTRIBUTES_SECTIONS_COLLAPSED, [
CollapsibleSection.ATTRIBUTES,
])
}
this.settingsService
.storeSettings()
.pipe(first())
.subscribe({
error: (error) => {
this.toastService.showError(
$localize`An error occurred while saving settings.`
)
console.warn(error)
},
})
setTimeout(() => {
this.slimSidebarAnimating = false
}, 200) // slightly longer than css animation for slim sidebar

View File

@@ -448,6 +448,13 @@
</div>
</div>
}
@case (WorkflowActionType.MoveToTrash) {
<div class="row">
<div class="col">
<p class="text-muted small" i18n>The document will be moved to the trash at the end of the workflow run.</p>
</div>
</div>
}
}
</div>
</ng-template>

View File

@@ -143,6 +143,10 @@ export const WORKFLOW_ACTION_OPTIONS = [
id: WorkflowActionType.PasswordRemoval,
name: $localize`Password removal`,
},
{
id: WorkflowActionType.MoveToTrash,
name: $localize`Move to trash`,
},
]
export enum TriggerFilterType {

View File

@@ -117,7 +117,7 @@
</pngx-page-header>
<div class="row sticky-top py-3 mt-n2 mt-md-n3 bg-body">
<pngx-filter-editor [hidden]="isBulkEditing" [disabled]="isBulkEditing" [(filterRules)]="list.filterRules" [unmodifiedFilterRules]="unmodifiedFilterRules" [selectionData]="list.selectionData" #filterEditor></pngx-filter-editor>
<pngx-filter-editor [hidden]="isBulkEditing" [disabled]="isBulkEditing" [filterRules]="list.filterRules" (filterRulesChange)="onFilterRulesChange($event)" (resetFilterRules)="onFilterRulesReset($event)" [unmodifiedFilterRules]="unmodifiedFilterRules" [selectionData]="list.selectionData" #filterEditor></pngx-filter-editor>
<pngx-bulk-editor [hidden]="!isBulkEditing" [disabled]="!isBulkEditing"></pngx-bulk-editor>
</div>

View File

@@ -147,21 +147,21 @@ describe('DocumentListComponent', () => {
})
it('should show score sort fields on fulltext queries', () => {
documentListService.filterRules = [
documentListService.setFilterRules([
{
rule_type: FILTER_HAS_TAGS_ANY,
value: '10',
},
]
])
fixture.detectChanges()
expect(component.getSortFields()).toEqual(documentListService.sortFields)
documentListService.filterRules = [
documentListService.setFilterRules([
{
rule_type: FILTER_FULLTEXT_QUERY,
value: 'foo',
},
]
])
fixture.detectChanges()
expect(component.getSortFields()).toEqual(
documentListService.sortFieldsFullText
@@ -170,12 +170,12 @@ describe('DocumentListComponent', () => {
it('should determine if filtered, support reset', () => {
fixture.detectChanges()
documentListService.filterRules = [
documentListService.setFilterRules([
{
rule_type: FILTER_HAS_TAGS_ANY,
value: '10',
},
]
])
documentListService.isReloading = false
fixture.detectChanges()
expect(component.isFiltered).toBeTruthy()
@@ -185,6 +185,20 @@ describe('DocumentListComponent', () => {
expect(fixture.nativeElement.textContent.match(/Reset/g)).toHaveLength(1)
})
it('should apply filter rule changes via list service', () => {
const setFilterRulesSpy = jest.spyOn(documentListService, 'setFilterRules')
const rules = [{ rule_type: FILTER_HAS_TAGS_ANY, value: '10' }]
component.onFilterRulesChange(rules)
expect(setFilterRulesSpy).toHaveBeenCalledWith(rules)
})
it('should reset filter rules to page one via list service', () => {
const setFilterRulesSpy = jest.spyOn(documentListService, 'setFilterRules')
const rules = [{ rule_type: FILTER_HAS_TAGS_ANY, value: '10' }]
component.onFilterRulesReset(rules)
expect(setFilterRulesSpy).toHaveBeenCalledWith(rules, true)
})
it('should load saved view from URL', () => {
const view: SavedView = {
id: 10,
@@ -217,7 +231,7 @@ describe('DocumentListComponent', () => {
.spyOn(activatedRoute, 'paramMap', 'get')
.mockReturnValue(of(convertToParamMap(queryParams)))
activatedRoute.snapshot.queryParams = queryParams
fixture.detectChanges()
component.ngOnInit()
expect(getSavedViewSpy).toHaveBeenCalledWith(view.id)
expect(activateSavedViewSpy).toHaveBeenCalledWith(
view,

View File

@@ -212,6 +212,14 @@ export class DocumentListComponent
this.list.setSort(event.column, event.reverse)
}
// Applies filter rules emitted by the filter editor to the list service
// without resetting pagination (the user is refining the current view).
onFilterRulesChange(filterRules: FilterRule[]) {
  this.list.setFilterRules(filterRules)
}
// Applies filter rules from a reset event; the second argument requests a
// jump back to page 1, since the previous page index may not exist anymore.
onFilterRulesReset(filterRules: FilterRule[]) {
  this.list.setFilterRules(filterRules, true)
}
get isBulkEditing(): boolean {
return this.list.selected.size > 0
}
@@ -300,7 +308,7 @@ export class DocumentListComponent
if (this.list.selected.size > 0) {
this.list.selectNone()
} else if (this.isFiltered) {
this.filterEditor.resetSelected()
this.resetFilters()
}
})

View File

@@ -2107,6 +2107,22 @@ describe('FilterEditorComponent', () => {
expect(component.filterRules).toEqual(rules)
})
it('should emit reset filter rules when resetting', () => {
const rules = [{ rule_type: FILTER_HAS_TAGS_ANY, value: '2' }]
component.unmodifiedFilterRules = rules
component.filterRules = [
{ rule_type: FILTER_DOES_NOT_HAVE_TAG, value: '2' },
]
const resetFilterRulesSpy = jest.spyOn(component.resetFilterRules, 'next')
const filterRulesChangeSpy = jest.spyOn(component.filterRulesChange, 'next')
component.resetSelected()
expect(resetFilterRulesSpy).toHaveBeenCalledWith(rules)
expect(filterRulesChangeSpy).not.toHaveBeenCalled()
})
it('should support resetting text field', () => {
component.textFilter = 'foo'
component.resetTextField()

View File

@@ -1101,6 +1101,9 @@ export class FilterEditorComponent
@Output()
filterRulesChange = new EventEmitter<FilterRule[]>()
@Output()
resetFilterRules = new EventEmitter<FilterRule[]>()
@Input()
set selectionData(selectionData: SelectionData) {
this.tagDocumentCounts = selectionData?.selected_tags ?? null
@@ -1244,7 +1247,7 @@ export class FilterEditorComponent
this.textFilterTarget = TEXT_FILTER_TARGET_TITLE_CONTENT
this.documentService.searchQuery = ''
this.filterRules = this._unmodifiedFilterRules
this.updateRules()
this.resetFilterRules.next(this.filterRules)
}
toggleTag(tagId: number) {

View File

@@ -6,6 +6,7 @@ export enum WorkflowActionType {
Email = 3,
Webhook = 4,
PasswordRemoval = 5,
MoveToTrash = 6,
}
export interface WorkflowActionEmail extends ObjectWithId {

View File

@@ -164,7 +164,7 @@ describe('DocumentListViewService', () => {
value: tags__id__in,
},
]
documentListViewService.filterRules = filterRulesAny
documentListViewService.setFilterRules(filterRulesAny)
let req = httpTestingController.expectOne(
`${environment.apiBaseUrl}documents/?page=1&page_size=50&ordering=-created&truncate_content=true&tags__id__in=${tags__id__in}`
)
@@ -178,7 +178,7 @@ describe('DocumentListViewService', () => {
)
expect(req.request.method).toEqual('GET')
// reset the list
documentListViewService.filterRules = []
documentListViewService.setFilterRules([])
req = httpTestingController.expectOne(
`${environment.apiBaseUrl}documents/?page=1&page_size=50&ordering=-created&truncate_content=true`
)
@@ -210,7 +210,7 @@ describe('DocumentListViewService', () => {
value: tags__id__in,
},
]
documentListViewService.filterRules = filterRulesAny
documentListViewService.setFilterRules(filterRulesAny)
let req = httpTestingController.expectOne(
`${environment.apiBaseUrl}documents/?page=1&page_size=50&ordering=-created&truncate_content=true&tags__id__in=${tags__id__in}`
)
@@ -218,7 +218,7 @@ describe('DocumentListViewService', () => {
req.flush('Generic error', { status: 404, statusText: 'Unexpected error' })
expect(documentListViewService.error).toEqual('Generic error')
// reset the list
documentListViewService.filterRules = []
documentListViewService.setFilterRules([])
req = httpTestingController.expectOne(
`${environment.apiBaseUrl}documents/?page=1&page_size=50&ordering=-created&truncate_content=true`
)
@@ -295,13 +295,41 @@ describe('DocumentListViewService', () => {
})
it('should use filter rules to update query params', () => {
documentListViewService.filterRules = filterRules
documentListViewService.setFilterRules(filterRules)
const req = httpTestingController.expectOne(
`${environment.apiBaseUrl}documents/?page=${documentListViewService.currentPage}&page_size=${documentListViewService.pageSize}&ordering=-created&truncate_content=true&tags__id__all=${tags__id__all}`
)
expect(req.request.method).toEqual('GET')
})
it('should support setting filter rules and resetting to page one', () => {
documentListViewService.currentPage = 2
let req = httpTestingController.expectOne((request) =>
request.urlWithParams.startsWith(
`${environment.apiBaseUrl}documents/?page=2&page_size=50&ordering=-created&truncate_content=true`
)
)
expect(req.request.method).toEqual('GET')
req.flush(full_results)
req = httpTestingController.expectOne(
`${environment.apiBaseUrl}documents/selection_data/`
)
req.flush([])
documentListViewService.setFilterRules(filterRules, true)
const filteredReqs = httpTestingController.match(
`${environment.apiBaseUrl}documents/?page=1&page_size=50&ordering=-created&truncate_content=true&tags__id__all=${tags__id__all}`
)
expect(filteredReqs).toHaveLength(1)
filteredReqs[0].flush(full_results)
req = httpTestingController.expectOne(
`${environment.apiBaseUrl}documents/selection_data/`
)
req.flush([])
expect(documentListViewService.currentPage).toEqual(1)
})
it('should support quick filter', () => {
documentListViewService.quickFilter(filterRules)
const req = httpTestingController.expectOne(
@@ -336,7 +364,7 @@ describe('DocumentListViewService', () => {
req = httpTestingController.expectOne(
`${environment.apiBaseUrl}documents/?page=1&page_size=50&ordering=-added&truncate_content=true&tags__id__all=9`
)
documentListViewService.filterRules = []
documentListViewService.setFilterRules([])
req = httpTestingController.expectOne(
`${environment.apiBaseUrl}documents/?page=1&page_size=50&ordering=-added&truncate_content=true`
)
@@ -348,7 +376,7 @@ describe('DocumentListViewService', () => {
})
it('should support navigating next / previous', () => {
documentListViewService.filterRules = []
documentListViewService.setFilterRules([])
let req = httpTestingController.expectOne(
`${environment.apiBaseUrl}documents/?page=1&page_size=50&ordering=-created&truncate_content=true`
)
@@ -558,7 +586,7 @@ describe('DocumentListViewService', () => {
req.flush(full_results)
expect(documentListViewService.selected.size).toEqual(6)
documentListViewService.filterRules = filterRules
documentListViewService.setFilterRules(filterRules)
httpTestingController.expectOne(
`${environment.apiBaseUrl}documents/?page=1&page_size=50&ordering=-created&truncate_content=true&tags__id__all=9`
)
@@ -592,7 +620,7 @@ describe('DocumentListViewService', () => {
documentListViewService.loadSavedView(view2)
expect(documentListViewService.sortField).toEqual('score')
documentListViewService.filterRules = []
documentListViewService.setFilterRules([])
expect(documentListViewService.sortField).toEqual('created')
httpTestingController.expectOne(
`${environment.apiBaseUrl}documents/?page=1&page_size=50&ordering=-created&truncate_content=true`

View File

@@ -342,7 +342,7 @@ export class DocumentListViewService {
})
}
set filterRules(filterRules: FilterRule[]) {
setFilterRules(filterRules: FilterRule[], resetPage: boolean = false) {
if (
!isFullTextFilterRule(filterRules) &&
this.activeListViewState.sortField == 'score'
@@ -350,6 +350,9 @@ export class DocumentListViewService {
this.activeListViewState.sortField = 'created'
}
this.activeListViewState.filterRules = filterRules
if (resetPage) {
this.activeListViewState.currentPage = 1
}
this.reload()
this.reduceSelectionToFilter()
this.saveDocumentListView()
@@ -479,7 +482,7 @@ export class DocumentListViewService {
quickFilter(filterRules: FilterRule[]) {
this._activeSavedViewId = null
this.filterRules = filterRules
this.setFilterRules(filterRules)
this.router.navigate(['documents'])
}

View File

@@ -6,7 +6,7 @@ export const environment = {
apiVersion: '9', // match src/paperless/settings.py
appTitle: 'Paperless-ngx',
tag: 'prod',
version: '2.20.7',
version: '2.20.8',
webSocketHost: window.location.host,
webSocketProtocol: window.location.protocol == 'https:' ? 'wss:' : 'ws:',
webSocketBaseUrl: base_url.pathname + 'ws/',

View File

@@ -0,0 +1,29 @@
# Generated by Django 5.2.11 on 2026-02-14 19:19
from django.db import migrations
from django.db import models
class Migration(migrations.Migration):
    """Extend WorkflowAction.type choices with the new 'Move to trash' (6) option.

    AlterField only rewrites the declared choices list; existing rows are
    untouched and no data migration is needed.
    """

    dependencies = [
        ("documents", "0011_optimize_integer_field_sizes"),
    ]

    operations = [
        migrations.AlterField(
            model_name="workflowaction",
            name="type",
            field=models.PositiveSmallIntegerField(
                # Must stay in sync with WorkflowAction.WorkflowActionType.
                choices=[
                    (1, "Assignment"),
                    (2, "Removal"),
                    (3, "Email"),
                    (4, "Webhook"),
                    (5, "Password removal"),
                    (6, "Move to trash"),
                ],
                default=1,
                verbose_name="Workflow Action Type",
            ),
        ),
    ]

View File

@@ -1409,6 +1409,10 @@ class WorkflowAction(models.Model):
5,
_("Password removal"),
)
MOVE_TO_TRASH = (
6,
_("Move to trash"),
)
type = models.PositiveSmallIntegerField(
_("Workflow Action Type"),

View File

@@ -48,6 +48,7 @@ from documents.permissions import get_objects_for_user_owner_aware
from documents.templating.utils import convert_format_str_to_template_format
from documents.workflows.actions import build_workflow_action_context
from documents.workflows.actions import execute_email_action
from documents.workflows.actions import execute_move_to_trash_action
from documents.workflows.actions import execute_password_removal_action
from documents.workflows.actions import execute_webhook_action
from documents.workflows.mutations import apply_assignment_to_document
@@ -58,6 +59,8 @@ from documents.workflows.utils import get_workflows_for_trigger
from paperless.config import AIConfig
if TYPE_CHECKING:
import uuid
from documents.classifier import DocumentClassifier
from documents.data_models import ConsumableDocument
from documents.data_models import DocumentMetadataOverrides
@@ -727,7 +730,7 @@ def add_to_index(sender, document, **kwargs) -> None:
def run_workflows_added(
sender,
document: Document,
logging_group=None,
logging_group: uuid.UUID | None = None,
original_file=None,
**kwargs,
) -> None:
@@ -743,7 +746,7 @@ def run_workflows_added(
def run_workflows_updated(
sender,
document: Document,
logging_group=None,
logging_group: uuid.UUID | None = None,
**kwargs,
) -> None:
run_workflows(
@@ -757,7 +760,7 @@ def run_workflows(
trigger_type: WorkflowTrigger.WorkflowTriggerType,
document: Document | ConsumableDocument,
workflow_to_run: Workflow | None = None,
logging_group=None,
logging_group: uuid.UUID | None = None,
overrides: DocumentMetadataOverrides | None = None,
original_file: Path | None = None,
) -> tuple[DocumentMetadataOverrides, str] | None:
@@ -783,14 +786,33 @@ def run_workflows(
for workflow in workflows:
if not use_overrides:
# This can be called from bulk_update_documents, which may be running multiple times
# Refresh this so the matching data is fresh and instance fields are re-freshed
# Otherwise, this instance might be behind and overwrite the work another process did
document.refresh_from_db()
doc_tag_ids = list(document.tags.values_list("pk", flat=True))
if TYPE_CHECKING:
assert isinstance(document, Document)
try:
# This can be called from bulk_update_documents, which may be running multiple times
# Refresh this so the matching data is fresh and instance fields are re-freshed
# Otherwise, this instance might be behind and overwrite the work another process did
document.refresh_from_db()
doc_tag_ids = list(document.tags.values_list("pk", flat=True))
except Document.DoesNotExist:
# Document was hard deleted by a previous workflow or another process
logger.info(
"Document no longer exists, skipping remaining workflows",
extra={"group": logging_group},
)
break
# Check if document was soft deleted (moved to trash)
if document.is_deleted:
logger.info(
"Document was moved to trash, skipping remaining workflows",
extra={"group": logging_group},
)
break
if matching.document_matches_workflow(document, workflow, trigger_type):
action: WorkflowAction
has_move_to_trash_action = False
for action in workflow.actions.order_by("order", "pk"):
message = f"Applying {action} from {workflow}"
if not use_overrides:
@@ -834,6 +856,8 @@ def run_workflows(
)
elif action.type == WorkflowAction.WorkflowActionType.PASSWORD_REMOVAL:
execute_password_removal_action(action, document, logging_group)
elif action.type == WorkflowAction.WorkflowActionType.MOVE_TO_TRASH:
has_move_to_trash_action = True
if not use_overrides:
# limit title to 128 characters
@@ -848,7 +872,12 @@ def run_workflows(
document=document if not use_overrides else None,
)
if has_move_to_trash_action:
execute_move_to_trash_action(action, document, logging_group)
if use_overrides:
if TYPE_CHECKING:
assert overrides is not None
return overrides, "\n".join(messages)

View File

@@ -896,3 +896,210 @@ class TestApiWorkflows(DirectoriesMixin, APITestCase):
"Passwords are required",
str(response.data["non_field_errors"][0]),
)
def test_trash_action_validation(self) -> None:
    """
    GIVEN:
        - API requests to create two separate workflows, each with a
          move-to-trash action
    WHEN:
        - API is called for each workflow
    THEN:
        - Both workflows are created (HTTP 201); a trash action in one
          workflow does not block creating another
    """
    # First workflow whose only action is move-to-trash.
    response = self.client.post(
        self.ENDPOINT,
        json.dumps(
            {
                "name": "Workflow 2",
                "order": 1,
                "triggers": [
                    {
                        "type": WorkflowTrigger.WorkflowTriggerType.CONSUMPTION,
                        "sources": [DocumentSource.ApiUpload],
                        "filter_filename": "*",
                    },
                ],
                "actions": [
                    {
                        "type": WorkflowAction.WorkflowActionType.MOVE_TO_TRASH,
                    },
                ],
            },
        ),
        content_type="application/json",
    )
    self.assertEqual(response.status_code, status.HTTP_201_CREATED)

    # A second, independent workflow with its own trash action must also
    # be accepted.
    response = self.client.post(
        self.ENDPOINT,
        json.dumps(
            {
                "name": "Workflow 3",
                "order": 2,
                "triggers": [
                    {
                        "type": WorkflowTrigger.WorkflowTriggerType.CONSUMPTION,
                        "sources": [DocumentSource.ApiUpload],
                        "filter_filename": "*",
                    },
                ],
                "actions": [
                    {
                        "type": WorkflowAction.WorkflowActionType.MOVE_TO_TRASH,
                    },
                ],
            },
        ),
        content_type="application/json",
    )
    self.assertEqual(response.status_code, status.HTTP_201_CREATED)
def test_trash_action_as_last_action_valid(self) -> None:
    """
    GIVEN:
        - API request to create a workflow with multiple actions
        - Move to trash action is the last action
    WHEN:
        - API is called
    THEN:
        - Workflow is created successfully (HTTP 201)
    """
    # Trash placed after assignment and removal actions must validate.
    response = self.client.post(
        self.ENDPOINT,
        json.dumps(
            {
                "name": "Workflow with Move to Trash Last",
                "order": 1,
                "triggers": [
                    {
                        "type": WorkflowTrigger.WorkflowTriggerType.CONSUMPTION,
                        "sources": [DocumentSource.ApiUpload],
                        "filter_filename": "*",
                    },
                ],
                "actions": [
                    {
                        "type": WorkflowAction.WorkflowActionType.ASSIGNMENT,
                        "assign_title": "Assigned Title",
                    },
                    {
                        "type": WorkflowAction.WorkflowActionType.REMOVAL,
                        "remove_all_tags": True,
                    },
                    {
                        "type": WorkflowAction.WorkflowActionType.MOVE_TO_TRASH,
                    },
                ],
            },
        ),
        content_type="application/json",
    )
    self.assertEqual(response.status_code, status.HTTP_201_CREATED)
def test_update_workflow_add_trash_at_end_valid(self) -> None:
    """
    GIVEN:
        - Existing workflow without trash action
    WHEN:
        - PATCH to add trash action at end
    THEN:
        - HTTP 200 success
    """
    # Create the initial workflow with a single assignment action.
    response = self.client.post(
        self.ENDPOINT,
        json.dumps(
            {
                "name": "Workflow to Add Move to Trash",
                "order": 1,
                "triggers": [
                    {
                        "type": WorkflowTrigger.WorkflowTriggerType.CONSUMPTION,
                        "sources": [DocumentSource.ApiUpload],
                        "filter_filename": "*",
                    },
                ],
                "actions": [
                    {
                        "type": WorkflowAction.WorkflowActionType.ASSIGNMENT,
                        "assign_title": "First Action",
                    },
                ],
            },
        ),
        content_type="application/json",
    )
    self.assertEqual(response.status_code, status.HTTP_201_CREATED)
    workflow_id = response.data["id"]

    # Partially update the workflow, appending a trash action at the end.
    response = self.client.patch(
        f"{self.ENDPOINT}{workflow_id}/",
        json.dumps(
            {
                "actions": [
                    {
                        "type": WorkflowAction.WorkflowActionType.ASSIGNMENT,
                        "assign_title": "First Action",
                    },
                    {
                        "type": WorkflowAction.WorkflowActionType.MOVE_TO_TRASH,
                    },
                ],
            },
        ),
        content_type="application/json",
    )
    self.assertEqual(response.status_code, status.HTTP_200_OK)
def test_update_workflow_remove_trash_action_valid(self) -> None:
    """
    GIVEN:
        - Existing workflow with trash action
    WHEN:
        - PATCH to remove trash action
    THEN:
        - HTTP 200 success
    """
    # Create a workflow that already ends with a trash action.
    response = self.client.post(
        self.ENDPOINT,
        json.dumps(
            {
                "name": "Workflow to Remove move to trash",
                "order": 1,
                "triggers": [
                    {
                        "type": WorkflowTrigger.WorkflowTriggerType.CONSUMPTION,
                        "sources": [DocumentSource.ApiUpload],
                        "filter_filename": "*",
                    },
                ],
                "actions": [
                    {
                        "type": WorkflowAction.WorkflowActionType.ASSIGNMENT,
                        "assign_title": "First Action",
                    },
                    {
                        "type": WorkflowAction.WorkflowActionType.MOVE_TO_TRASH,
                    },
                ],
            },
        ),
        content_type="application/json",
    )
    self.assertEqual(response.status_code, status.HTTP_201_CREATED)
    workflow_id = response.data["id"]

    # Replace the action list with one that no longer includes trash.
    response = self.client.patch(
        f"{self.ENDPOINT}{workflow_id}/",
        json.dumps(
            {
                "actions": [
                    {
                        "type": WorkflowAction.WorkflowActionType.ASSIGNMENT,
                        "assign_title": "Only Action",
                    },
                ],
            },
        ),
        content_type="application/json",
    )
    self.assertEqual(response.status_code, status.HTTP_200_OK)

View File

@@ -3,9 +3,11 @@ import json
import shutil
import socket
import tempfile
from collections.abc import Callable
from datetime import timedelta
from pathlib import Path
from typing import TYPE_CHECKING
from typing import Any
from unittest import mock
import pytest
@@ -55,6 +57,7 @@ from documents.models import WorkflowActionEmail
from documents.models import WorkflowActionWebhook
from documents.models import WorkflowRun
from documents.models import WorkflowTrigger
from documents.plugins.base import StopConsumeTaskError
from documents.serialisers import WorkflowTriggerSerializer
from documents.signals import document_consumption_finished
from documents.tests.utils import DirectoriesMixin
@@ -3914,6 +3917,427 @@ class TestWorkflows(
)
assert mock_remove_password.call_count == 2
def test_workflow_trash_action_soft_delete(self):
    """
    GIVEN:
        - Workflow triggered on document update whose only action is
          move-to-trash
    WHEN:
        - A matching document is updated and workflows run
    THEN:
        - The document is soft-deleted (removed from the default manager,
          present in deleted_objects)
    """
    # Build the workflow: one update trigger, one trash action.
    update_trigger = WorkflowTrigger.objects.create(
        type=WorkflowTrigger.WorkflowTriggerType.DOCUMENT_UPDATED,
    )
    trash_action = WorkflowAction.objects.create(
        type=WorkflowAction.WorkflowActionType.MOVE_TO_TRASH,
    )
    workflow = Workflow.objects.create(
        name="Workflow 1",
        order=0,
    )
    workflow.triggers.add(update_trigger)
    workflow.actions.add(trash_action)
    workflow.save()

    # A document that matches the (unfiltered) trigger.
    document = Document.objects.create(
        title="sample test",
        correspondent=self.c,
        original_filename="sample.pdf",
    )

    # Sanity-check state before the run, then confirm the soft delete.
    self.assertEqual(Document.objects.count(), 1)
    self.assertEqual(Document.deleted_objects.count(), 0)
    run_workflows(WorkflowTrigger.WorkflowTriggerType.DOCUMENT_UPDATED, document)
    self.assertEqual(Document.objects.count(), 0)
    self.assertEqual(Document.deleted_objects.count(), 1)
@override_settings(
    PAPERLESS_EMAIL_HOST="localhost",
    EMAIL_ENABLED=True,
    PAPERLESS_URL="http://localhost:8000",
)
@mock.patch("django.core.mail.message.EmailMessage.send")
def test_workflow_trash_with_email_action(self, mock_email_send) -> None:
    """
    GIVEN:
        - Workflow with email action, then move to trash action
    WHEN:
        - Document matches and workflow runs
    THEN:
        - Email is sent first
        - Document is moved to trash (soft deleted)
    """
    # EmailMessage.send returns the number of messages sent.
    mock_email_send.return_value = 1

    trigger = WorkflowTrigger.objects.create(
        type=WorkflowTrigger.WorkflowTriggerType.DOCUMENT_UPDATED,
    )
    email_action = WorkflowActionEmail.objects.create(
        subject="Document deleted: {doc_title}",
        body="Document {doc_title} will be deleted",
        to="user@example.com",
        include_document=False,
    )
    email_workflow_action = WorkflowAction.objects.create(
        type=WorkflowAction.WorkflowActionType.EMAIL,
        email=email_action,
    )
    trash_workflow_action = WorkflowAction.objects.create(
        type=WorkflowAction.WorkflowActionType.MOVE_TO_TRASH,
    )
    w = Workflow.objects.create(
        name="Workflow with email then move to trash",
        order=0,
    )
    w.triggers.add(trigger)
    # Email action added before trash so it runs on the live document.
    w.actions.add(email_workflow_action, trash_workflow_action)
    w.save()

    doc = Document.objects.create(
        title="sample test",
        correspondent=self.c,
        original_filename="sample.pdf",
    )

    self.assertEqual(Document.objects.count(), 1)
    self.assertEqual(Document.deleted_objects.count(), 0)

    run_workflows(WorkflowTrigger.WorkflowTriggerType.DOCUMENT_UPDATED, doc)

    # Email went out exactly once, and the document was soft-deleted.
    mock_email_send.assert_called_once()
    self.assertEqual(Document.objects.count(), 0)
    self.assertEqual(Document.deleted_objects.count(), 1)
@override_settings(
    PAPERLESS_URL="http://localhost:8000",
)
@mock.patch("documents.workflows.webhooks.send_webhook.delay")
def test_workflow_trash_with_webhook_action(self, mock_webhook_delay) -> None:
    """
    GIVEN:
        - Workflow with webhook action (include_document=True), then move to trash action
    WHEN:
        - Document matches and workflow runs
    THEN:
        - Webhook .delay() is called with complete data including file bytes
        - Document is moved to trash (soft deleted)
        - Webhook task has all necessary data and doesn't rely on document existence
    """
    trigger = WorkflowTrigger.objects.create(
        type=WorkflowTrigger.WorkflowTriggerType.DOCUMENT_UPDATED,
    )
    webhook_action = WorkflowActionWebhook.objects.create(
        use_params=True,
        params={
            "title": "{{doc_title}}",
            "message": "Document being deleted",
        },
        url="https://paperless-ngx.com/webhook",
        # Attach the document file so the task carries the bytes itself.
        include_document=True,
    )
    webhook_workflow_action = WorkflowAction.objects.create(
        type=WorkflowAction.WorkflowActionType.WEBHOOK,
        webhook=webhook_action,
    )
    trash_workflow_action = WorkflowAction.objects.create(
        type=WorkflowAction.WorkflowActionType.MOVE_TO_TRASH,
    )
    w = Workflow.objects.create(
        name="Workflow with webhook then move to trash",
        order=0,
    )
    w.triggers.add(trigger)
    w.actions.add(webhook_workflow_action, trash_workflow_action)
    w.save()

    # Real file on disk so the webhook can read the document bytes.
    test_file = shutil.copy(
        self.SAMPLE_DIR / "simple.pdf",
        self.dirs.scratch_dir / "simple.pdf",
    )
    doc = Document.objects.create(
        title="sample test",
        correspondent=self.c,
        original_filename="simple.pdf",
        filename=test_file,
        mime_type="application/pdf",
    )

    self.assertEqual(Document.objects.count(), 1)
    self.assertEqual(Document.deleted_objects.count(), 0)

    run_workflows(WorkflowTrigger.WorkflowTriggerType.DOCUMENT_UPDATED, doc)

    # The queued task must be self-contained: rendered params plus the
    # (name, bytes, mime) file tuple, independent of the soon-deleted row.
    mock_webhook_delay.assert_called_once()
    call_kwargs = mock_webhook_delay.call_args[1]
    self.assertEqual(call_kwargs["url"], "https://paperless-ngx.com/webhook")
    self.assertEqual(
        call_kwargs["data"],
        {"title": "sample test", "message": "Document being deleted"},
    )
    self.assertIsNotNone(call_kwargs["files"])
    self.assertIn("file", call_kwargs["files"])
    self.assertEqual(call_kwargs["files"]["file"][0], "simple.pdf")
    self.assertEqual(call_kwargs["files"]["file"][2], "application/pdf")
    self.assertIsInstance(call_kwargs["files"]["file"][1], bytes)

    # Document itself ends up soft-deleted.
    self.assertEqual(Document.objects.count(), 0)
    self.assertEqual(Document.deleted_objects.count(), 1)
@override_settings(
    PAPERLESS_EMAIL_HOST="localhost",
    EMAIL_ENABLED=True,
    PAPERLESS_URL="http://localhost:8000",
)
@mock.patch("django.core.mail.message.EmailMessage.send")
def test_workflow_trash_after_email_failure(self, mock_email_send) -> None:
    """
    GIVEN:
        - Workflow with email action (that fails), then move to trash action
    WHEN:
        - Document matches and workflow runs
        - Email action raises exception
    THEN:
        - Email failure is logged
        - Move to Trash still executes successfully (soft delete)
    """
    # Simulate a hard SMTP failure for every send attempt.
    mock_email_send.side_effect = Exception("Email server error")

    trigger = WorkflowTrigger.objects.create(
        type=WorkflowTrigger.WorkflowTriggerType.DOCUMENT_UPDATED,
    )
    email_action = WorkflowActionEmail.objects.create(
        subject="Document deleted: {doc_title}",
        body="Document {doc_title} will be deleted",
        to="user@example.com",
        include_document=False,
    )
    email_workflow_action = WorkflowAction.objects.create(
        type=WorkflowAction.WorkflowActionType.EMAIL,
        email=email_action,
    )
    trash_workflow_action = WorkflowAction.objects.create(
        type=WorkflowAction.WorkflowActionType.MOVE_TO_TRASH,
    )
    w = Workflow.objects.create(
        name="Workflow with failing email then move to trash",
        order=0,
    )
    w.triggers.add(trigger)
    w.actions.add(email_workflow_action, trash_workflow_action)
    w.save()

    doc = Document.objects.create(
        title="sample test",
        correspondent=self.c,
        original_filename="sample.pdf",
    )

    self.assertEqual(Document.objects.count(), 1)
    self.assertEqual(Document.deleted_objects.count(), 0)

    # The email error must be logged, not propagated, so the trash
    # action still runs afterwards.
    with self.assertLogs("paperless.workflows.actions", level="ERROR") as cm:
        run_workflows(WorkflowTrigger.WorkflowTriggerType.DOCUMENT_UPDATED, doc)
        expected_str = "Error occurred sending notification email"
        self.assertIn(expected_str, cm.output[0])

    self.assertEqual(Document.objects.count(), 0)
    self.assertEqual(Document.deleted_objects.count(), 1)
def test_multiple_workflows_trash_then_assignment(self) -> None:
    """
    GIVEN:
        - Workflow 1 (order=0) with move to trash action
        - Workflow 2 (order=1) with assignment action
        - Both workflows match the same document
    WHEN:
        - Workflows run sequentially
    THEN:
        - First workflow runs and deletes document (soft delete)
        - Second workflow does not trigger (document no longer exists)
        - Logs confirm move to trash and skipping of remaining workflows
    """
    # Workflow 1: lower order, so it runs first and trashes the document.
    trigger1 = WorkflowTrigger.objects.create(
        type=WorkflowTrigger.WorkflowTriggerType.DOCUMENT_UPDATED,
    )
    trash_workflow_action = WorkflowAction.objects.create(
        type=WorkflowAction.WorkflowActionType.MOVE_TO_TRASH,
    )
    w1 = Workflow.objects.create(
        name="Workflow 1 - Move to Trash",
        order=0,
    )
    w1.triggers.add(trigger1)
    w1.actions.add(trash_workflow_action)
    w1.save()

    # Workflow 2: would assign a correspondent, but must never run.
    trigger2 = WorkflowTrigger.objects.create(
        type=WorkflowTrigger.WorkflowTriggerType.DOCUMENT_UPDATED,
    )
    assignment_action = WorkflowAction.objects.create(
        type=WorkflowAction.WorkflowActionType.ASSIGNMENT,
        assign_correspondent=self.c2,
    )
    w2 = Workflow.objects.create(
        name="Workflow 2 - Assignment",
        order=1,
    )
    w2.triggers.add(trigger2)
    w2.actions.add(assignment_action)
    w2.save()

    doc = Document.objects.create(
        title="sample test",
        correspondent=self.c,
        original_filename="sample.pdf",
    )

    self.assertEqual(Document.objects.count(), 1)
    self.assertEqual(Document.deleted_objects.count(), 0)

    with self.assertLogs("paperless", level="DEBUG") as cm:
        run_workflows(WorkflowTrigger.WorkflowTriggerType.DOCUMENT_UPDATED, doc)

    self.assertEqual(Document.objects.count(), 0)
    self.assertEqual(Document.deleted_objects.count(), 1)

    # We check logs instead of WorkflowRun.objects.count() because when the document
    # is soft-deleted, the WorkflowRun is cascade-deleted (hard delete) since it does
    # not inherit from the SoftDeleteModel. The logs confirm that the first workflow
    # executed the move to trash and remaining workflows were skipped.
    log_output = "\n".join(cm.output)
    self.assertIn("Moved document", log_output)
    self.assertIn("to trash", log_output)
    self.assertIn(
        "Document was moved to trash, skipping remaining workflows",
        log_output,
    )
def test_workflow_delete_action_during_consumption(self) -> None:
    """
    GIVEN:
        - Workflow with consumption trigger and delete action
    WHEN:
        - Document is being consumed and workflow runs
    THEN:
        - StopConsumeTaskError is raised to halt consumption
        - Original file is deleted
        - No document is created
    """
    trigger = WorkflowTrigger.objects.create(
        type=WorkflowTrigger.WorkflowTriggerType.CONSUMPTION,
        sources=f"{DocumentSource.ConsumeFolder}",
        filter_filename="*",
    )
    action = WorkflowAction.objects.create(
        type=WorkflowAction.WorkflowActionType.MOVE_TO_TRASH,
    )
    w = Workflow.objects.create(
        name="Workflow Delete During Consumption",
        order=0,
    )
    w.triggers.add(trigger)
    w.actions.add(action)
    w.save()

    # Create a test file to be consumed
    test_file = shutil.copy(
        self.SAMPLE_DIR / "simple.pdf",
        self.dirs.scratch_dir / "simple.pdf",
    )
    test_file_path = Path(test_file)
    self.assertTrue(test_file_path.exists())

    # Create a ConsumableDocument
    consumable_doc = ConsumableDocument(
        source=DocumentSource.ConsumeFolder,
        original_file=test_file_path,
    )

    self.assertEqual(Document.objects.count(), 0)

    # Run workflows with overrides (consumption flow); at this stage there
    # is no Document row yet, so "trash" means abort the consume task.
    with self.assertRaises(StopConsumeTaskError) as context:
        run_workflows(
            WorkflowTrigger.WorkflowTriggerType.CONSUMPTION,
            consumable_doc,
            overrides=DocumentMetadataOverrides(),
        )

    self.assertIn("deleted by workflow action", str(context.exception))

    # File should be deleted
    self.assertFalse(test_file_path.exists())

    # No document should be created
    self.assertEqual(Document.objects.count(), 0)
def test_workflow_delete_action_during_consumption_with_assignment(self):
    """
    GIVEN:
        - Workflow with consumption trigger, assignment action, then delete action
    WHEN:
        - Document is being consumed and workflow runs
    THEN:
        - StopConsumeTaskError is raised to halt consumption
        - Original file is deleted
        - No document is created (even though assignment would have worked)
    """
    consumption_trigger = WorkflowTrigger.objects.create(
        type=WorkflowTrigger.WorkflowTriggerType.CONSUMPTION,
        sources=f"{DocumentSource.ConsumeFolder}",
        filter_filename="*",
    )
    assign_action = WorkflowAction.objects.create(
        type=WorkflowAction.WorkflowActionType.ASSIGNMENT,
        assign_title="This should not be applied",
        assign_correspondent=self.c,
    )
    trash_action = WorkflowAction.objects.create(
        type=WorkflowAction.WorkflowActionType.MOVE_TO_TRASH,
    )
    workflow = Workflow.objects.create(
        name="Workflow Assignment then Delete During Consumption",
        order=0,
    )
    workflow.triggers.add(consumption_trigger)
    workflow.actions.add(assign_action, trash_action)
    workflow.save()

    # Stage a sample file in the scratch dir as if it arrived in the consume folder
    staged_file = Path(
        shutil.copy(
            self.SAMPLE_DIR / "simple.pdf",
            self.dirs.scratch_dir / "simple2.pdf",
        ),
    )
    self.assertTrue(staged_file.exists())

    incoming = ConsumableDocument(
        source=DocumentSource.ConsumeFolder,
        original_file=staged_file,
    )

    self.assertEqual(Document.objects.count(), 0)

    # Even with a preceding assignment action, the trash action aborts consumption
    with self.assertRaises(StopConsumeTaskError):
        run_workflows(
            WorkflowTrigger.WorkflowTriggerType.CONSUMPTION,
            incoming,
            overrides=DocumentMetadataOverrides(),
        )

    # The staged file is gone and nothing was persisted
    self.assertFalse(staged_file.exists())
    self.assertEqual(Document.objects.count(), 0)
class TestWebhookSend:
def test_send_webhook_data_or_json(
@@ -3956,13 +4380,17 @@ class TestWebhookSend:
@pytest.fixture
def resolve_to(monkeypatch):
def resolve_to(monkeypatch: pytest.MonkeyPatch) -> Callable[[str], None]:
"""
Force DNS resolution to a specific IP for any hostname.
"""
def _set(ip: str):
def fake_getaddrinfo(host, *_args, **_kwargs):
def _set(ip: str) -> None:
def fake_getaddrinfo(
host: str,
*_args: object,
**_kwargs: object,
) -> list[tuple[Any, ...]]:
return [(socket.AF_INET, None, None, "", (ip, 0))]
monkeypatch.setattr(socket, "getaddrinfo", fake_getaddrinfo)
@@ -4103,7 +4531,7 @@ class TestWebhookSecurity:
def test_strips_user_supplied_host_header(
self,
httpx_mock: HTTPXMock,
resolve_to,
resolve_to: Callable[[str], None],
) -> None:
"""
GIVEN:
@@ -4169,7 +4597,7 @@ class TestDateWorkflowLocalization(
self,
title_template: str,
expected_title: str,
):
) -> None:
"""
GIVEN:
- Document added workflow with title template using localize_date filter
@@ -4234,7 +4662,7 @@ class TestDateWorkflowLocalization(
self,
title_template: str,
expected_title: str,
):
) -> None:
"""
GIVEN:
- Document updated workflow with title template using localize_date filter
@@ -4310,7 +4738,7 @@ class TestDateWorkflowLocalization(
settings: SettingsWrapper,
title_template: str,
expected_title: str,
):
) -> None:
trigger = WorkflowTrigger.objects.create(
type=WorkflowTrigger.WorkflowTriggerType.CONSUMPTION,
sources=f"{DocumentSource.ApiUpload}",

View File

@@ -1,5 +1,6 @@
import logging
import re
import uuid
from pathlib import Path
from django.conf import settings
@@ -15,6 +16,7 @@ from documents.models import Document
from documents.models import DocumentType
from documents.models import WorkflowAction
from documents.models import WorkflowTrigger
from documents.plugins.base import StopConsumeTaskError
from documents.signals import document_consumption_finished
from documents.templating.workflows import parse_w_workflow_placeholders
from documents.workflows.webhooks import send_webhook
@@ -338,3 +340,33 @@ def execute_password_removal_action(
document.pk,
extra={"group": logging_group},
)
def execute_move_to_trash_action(
    action: WorkflowAction,
    document: Document | ConsumableDocument,
    logging_group: uuid.UUID | None,
) -> None:
    """
    Execute a move to trash action for a workflow on an existing document or a
    document in consumption. In case of an existing document it soft-deletes
    the document. In case of consumption it aborts consumption and deletes the
    file.
    """
    if isinstance(document, Document):
        # Existing document: delete() soft-deletes, i.e. moves it to the trash
        document.delete()
        logger.debug(
            f"Moved document {document} to trash",
            extra={"group": logging_group},
        )
        return

    # Document still in consumption: remove the source file (if it is still
    # present), then abort the consume task so no document is created.
    if document.original_file.exists():
        document.original_file.unlink()
    logger.info(
        f"Workflow move to trash action triggered during consumption, "
        f"deleting file {document.original_file}",
        extra={"group": logging_group},
    )
    raise StopConsumeTaskError(
        "Document deleted by workflow action during consumption",
    )

View File

@@ -2,7 +2,7 @@ msgid ""
msgstr ""
"Project-Id-Version: paperless-ngx\n"
"Report-Msgid-Bugs-To: \n"
"POT-Creation-Date: 2026-02-16 17:32+0000\n"
"POT-Creation-Date: 2026-02-24 00:43+0000\n"
"PO-Revision-Date: 2022-02-17 04:17\n"
"Last-Translator: \n"
"Language-Team: English\n"
@@ -89,7 +89,7 @@ msgstr ""
msgid "Automatic"
msgstr ""
#: documents/models.py:66 documents/models.py:444 documents/models.py:1659
#: documents/models.py:66 documents/models.py:444 documents/models.py:1663
#: paperless_mail/models.py:23 paperless_mail/models.py:143
msgid "name"
msgstr ""
@@ -252,7 +252,7 @@ msgid "The position of this document in your physical document archive."
msgstr ""
#: documents/models.py:313 documents/models.py:688 documents/models.py:742
#: documents/models.py:1702
#: documents/models.py:1706
msgid "document"
msgstr ""
@@ -1093,193 +1093,197 @@ msgid "Password removal"
msgstr ""
#: documents/models.py:1414
msgid "Move to trash"
msgstr ""
#: documents/models.py:1418
msgid "Workflow Action Type"
msgstr ""
#: documents/models.py:1419 documents/models.py:1661
#: documents/models.py:1423 documents/models.py:1665
#: paperless_mail/models.py:145
msgid "order"
msgstr ""
#: documents/models.py:1422
#: documents/models.py:1426
msgid "assign title"
msgstr ""
#: documents/models.py:1426
#: documents/models.py:1430
msgid "Assign a document title, must be a Jinja2 template, see documentation."
msgstr ""
#: documents/models.py:1434 paperless_mail/models.py:274
#: documents/models.py:1438 paperless_mail/models.py:274
msgid "assign this tag"
msgstr ""
#: documents/models.py:1443 paperless_mail/models.py:282
#: documents/models.py:1447 paperless_mail/models.py:282
msgid "assign this document type"
msgstr ""
#: documents/models.py:1452 paperless_mail/models.py:296
#: documents/models.py:1456 paperless_mail/models.py:296
msgid "assign this correspondent"
msgstr ""
#: documents/models.py:1461
#: documents/models.py:1465
msgid "assign this storage path"
msgstr ""
#: documents/models.py:1470
#: documents/models.py:1474
msgid "assign this owner"
msgstr ""
#: documents/models.py:1477
#: documents/models.py:1481
msgid "grant view permissions to these users"
msgstr ""
#: documents/models.py:1484
#: documents/models.py:1488
msgid "grant view permissions to these groups"
msgstr ""
#: documents/models.py:1491
#: documents/models.py:1495
msgid "grant change permissions to these users"
msgstr ""
#: documents/models.py:1498
#: documents/models.py:1502
msgid "grant change permissions to these groups"
msgstr ""
#: documents/models.py:1505
#: documents/models.py:1509
msgid "assign these custom fields"
msgstr ""
#: documents/models.py:1509
#: documents/models.py:1513
msgid "custom field values"
msgstr ""
#: documents/models.py:1513
#: documents/models.py:1517
msgid "Optional values to assign to the custom fields."
msgstr ""
#: documents/models.py:1522
#: documents/models.py:1526
msgid "remove these tag(s)"
msgstr ""
#: documents/models.py:1527
#: documents/models.py:1531
msgid "remove all tags"
msgstr ""
#: documents/models.py:1534
#: documents/models.py:1538
msgid "remove these document type(s)"
msgstr ""
#: documents/models.py:1539
#: documents/models.py:1543
msgid "remove all document types"
msgstr ""
#: documents/models.py:1546
#: documents/models.py:1550
msgid "remove these correspondent(s)"
msgstr ""
#: documents/models.py:1551
#: documents/models.py:1555
msgid "remove all correspondents"
msgstr ""
#: documents/models.py:1558
#: documents/models.py:1562
msgid "remove these storage path(s)"
msgstr ""
#: documents/models.py:1563
#: documents/models.py:1567
msgid "remove all storage paths"
msgstr ""
#: documents/models.py:1570
#: documents/models.py:1574
msgid "remove these owner(s)"
msgstr ""
#: documents/models.py:1575
#: documents/models.py:1579
msgid "remove all owners"
msgstr ""
#: documents/models.py:1582
#: documents/models.py:1586
msgid "remove view permissions for these users"
msgstr ""
#: documents/models.py:1589
#: documents/models.py:1593
msgid "remove view permissions for these groups"
msgstr ""
#: documents/models.py:1596
#: documents/models.py:1600
msgid "remove change permissions for these users"
msgstr ""
#: documents/models.py:1603
#: documents/models.py:1607
msgid "remove change permissions for these groups"
msgstr ""
#: documents/models.py:1608
#: documents/models.py:1612
msgid "remove all permissions"
msgstr ""
#: documents/models.py:1615
#: documents/models.py:1619
msgid "remove these custom fields"
msgstr ""
#: documents/models.py:1620
#: documents/models.py:1624
msgid "remove all custom fields"
msgstr ""
#: documents/models.py:1629
#: documents/models.py:1633
msgid "email"
msgstr ""
#: documents/models.py:1638
#: documents/models.py:1642
msgid "webhook"
msgstr ""
#: documents/models.py:1642
#: documents/models.py:1646
msgid "passwords"
msgstr ""
#: documents/models.py:1646
#: documents/models.py:1650
msgid ""
"Passwords to try when removing PDF protection. Separate with commas or new "
"lines."
msgstr ""
#: documents/models.py:1651
#: documents/models.py:1655
msgid "workflow action"
msgstr ""
#: documents/models.py:1652
#: documents/models.py:1656
msgid "workflow actions"
msgstr ""
#: documents/models.py:1667
#: documents/models.py:1671
msgid "triggers"
msgstr ""
#: documents/models.py:1674
#: documents/models.py:1678
msgid "actions"
msgstr ""
#: documents/models.py:1677 paperless_mail/models.py:154
#: documents/models.py:1681 paperless_mail/models.py:154
msgid "enabled"
msgstr ""
#: documents/models.py:1688
#: documents/models.py:1692
msgid "workflow"
msgstr ""
#: documents/models.py:1692
#: documents/models.py:1696
msgid "workflow trigger type"
msgstr ""
#: documents/models.py:1706
#: documents/models.py:1710
msgid "date run"
msgstr ""
#: documents/models.py:1712
#: documents/models.py:1716
msgid "workflow run"
msgstr ""
#: documents/models.py:1713
#: documents/models.py:1717
msgid "workflow runs"
msgstr ""

View File

@@ -1,6 +1,6 @@
from typing import Final
__version__: Final[tuple[int, int, int]] = (2, 20, 7)
__version__: Final[tuple[int, int, int]] = (2, 20, 8)
# Version string like X.Y.Z
__full_version_str__: Final[str] = ".".join(map(str, __version__))
# Version string like X.Y

View File

@@ -23,6 +23,7 @@ def get_embedding_model() -> BaseEmbedding:
return OpenAIEmbedding(
model=config.llm_embedding_model or "text-embedding-3-small",
api_key=config.llm_api_key,
api_base=config.llm_endpoint or None,
)
case LLMEmbeddingBackend.HUGGINGFACE:
return HuggingFaceEmbedding(

View File

@@ -65,12 +65,14 @@ def test_get_embedding_model_openai(mock_ai_config):
mock_ai_config.return_value.llm_embedding_backend = LLMEmbeddingBackend.OPENAI
mock_ai_config.return_value.llm_embedding_model = "text-embedding-3-small"
mock_ai_config.return_value.llm_api_key = "test_api_key"
mock_ai_config.return_value.llm_endpoint = "http://test-url"
with patch("paperless_ai.embedding.OpenAIEmbedding") as MockOpenAIEmbedding:
model = get_embedding_model()
MockOpenAIEmbedding.assert_called_once_with(
model="text-embedding-3-small",
api_key="test_api_key",
api_base="http://test-url",
)
assert model == MockOpenAIEmbedding.return_value

View File

@@ -536,6 +536,7 @@ class MailAccountHandler(LoggingMixin):
self.log.debug(f"Processing mail account {account}")
total_processed_files = 0
consumed_messages: set[tuple[str, str]] = set()
try:
with get_mailbox(
account.imap_server,
@@ -574,6 +575,7 @@ class MailAccountHandler(LoggingMixin):
M,
rule,
supports_gmail_labels=supports_gmail_labels,
consumed_messages=consumed_messages,
)
if total_processed_files > 0 and rule.stop_processing:
self.log.debug(
@@ -605,6 +607,7 @@ class MailAccountHandler(LoggingMixin):
rule: MailRule,
*,
supports_gmail_labels: bool,
consumed_messages: set[tuple[str, str]],
):
folders = [rule.folder]
# In case of MOVE, make sure also the destination exists
@@ -652,11 +655,26 @@ class MailAccountHandler(LoggingMixin):
mails_processed = 0
total_processed_files = 0
rule_seen_messages: set[tuple[str, str]] = set()
for message in messages:
if TYPE_CHECKING:
assert isinstance(message, MailMessage)
message_key = (rule.folder, message.uid)
if message_key in rule_seen_messages:
self.log.debug(
f"Skipping duplicate fetched mail '{message.uid}' subject '{message.subject}' from '{message.from_}'.",
)
continue
rule_seen_messages.add(message_key)
if message_key in consumed_messages:
self.log.debug(
f"Skipping mail '{message.uid}' subject '{message.subject}' from '{message.from_}', already queued by a previous rule in this run.",
)
continue
if ProcessedMail.objects.filter(
rule=rule,
uid=message.uid,
@@ -669,6 +687,8 @@ class MailAccountHandler(LoggingMixin):
try:
processed_files = self._handle_message(message, rule)
if processed_files > 0:
consumed_messages.add(message_key)
total_processed_files += processed_files
mails_processed += 1

View File

@@ -15,7 +15,7 @@ class Migration(migrations.Migration):
name="stop_processing",
field=models.BooleanField(
default=False,
help_text="If True, no further rules will be processed after this one if any document is consumed.",
help_text="If True, no further rules will be processed after this one if any document is queued.",
verbose_name="Stop processing further rules",
),
),

View File

@@ -272,6 +272,24 @@ class TestAPIMailAccounts(DirectoriesMixin, APITestCase):
self.assertEqual(response.status_code, status.HTTP_200_OK)
self.assertEqual(response.data["success"], True)
def test_mail_account_test_existing_nonexistent_id_forbidden(self) -> None:
    # Probing the test endpoint with an id that does not exist must be
    # rejected as a permissions failure, not reveal account existence.
    payload = {
        "id": 999999,
        "imap_server": "server.example.com",
        "imap_port": 443,
        "imap_security": MailAccount.ImapSecurity.SSL,
        "username": "admin",
        "password": "******",
    }
    response = self.client.post(
        f"{self.ENDPOINT}test/",
        json.dumps(payload),
        content_type="application/json",
    )
    self.assertEqual(response.status_code, status.HTTP_403_FORBIDDEN)
    self.assertEqual(response.content.decode(), "Insufficient permissions")
def test_get_mail_accounts_owner_aware(self) -> None:
"""
GIVEN:

View File

@@ -8,6 +8,7 @@ from datetime import timedelta
from unittest import mock
import pytest
from django.contrib.auth.models import Permission
from django.contrib.auth.models import User
from django.core.management import call_command
from django.db import DatabaseError
@@ -862,6 +863,66 @@ class TestMail(
self.assertEqual(len(self.mailMocker.bogus_mailbox.messages), 0)
def test_handle_mail_account_overlapping_rules_only_first_consumes(self):
    # Two rules on the same account match the same messages; only the
    # first rule (lowest order) should queue them for consumption.
    account = MailAccount.objects.create(
        name="test",
        imap_server="",
        username="admin",
        password="secret",
    )
    first_rule = MailRule.objects.create(
        name="testrule-first",
        account=account,
        action=MailRule.MailAction.DELETE,
        filter_subject="Claim",
        order=1,
    )
    MailRule.objects.create(
        name="testrule-second",
        account=account,
        action=MailRule.MailAction.DELETE,
        filter_subject="Claim",
        order=2,
    )

    self.mail_account_handler.handle_mail_account(account)
    self.mailMocker.apply_mail_actions()

    queue_mock = self.mailMocker._queue_consumption_tasks_mock
    self.assertEqual(queue_mock.call_count, 1)
    self.assertEqual(queue_mock.call_args.kwargs["rule"].id, first_rule.id)
def test_handle_mail_account_skip_duplicate_uids_from_fetch(self):
    # A message returned twice by the mailbox fetch should be queued only
    # once for consumption.
    account = MailAccount.objects.create(
        name="test",
        imap_server="",
        username="admin",
        password="secret",
    )
    MailRule.objects.create(
        name="testrule",
        account=account,
        action=MailRule.MailAction.DELETE,
        filter_subject="Duplicated mail",
    )

    message = self.mailMocker.messageBuilder.create_message(
        subject="Duplicated mail",
    )
    self.mailMocker.bogus_mailbox.messages = [message, message]
    self.mailMocker.bogus_mailbox.updateClient()

    self.mail_account_handler.handle_mail_account(account)
    self.mailMocker.apply_mail_actions()

    self.assertEqual(self.mailMocker._queue_consumption_tasks_mock.call_count, 1)
@pytest.mark.flaky(reruns=4)
def test_handle_mail_account_flag(self) -> None:
account = MailAccount.objects.create(
@@ -1734,6 +1795,10 @@ class TestMailAccountTestView(APITestCase):
username="testuser",
password="testpassword",
)
self.user.user_permissions.add(
*Permission.objects.filter(codename__in=["add_mailaccount"]),
)
self.user.save()
self.client.force_authenticate(user=self.user)
self.url = "/api/mail_accounts/test/"
@@ -1850,6 +1915,56 @@ class TestMailAccountTestView(APITestCase):
expected_str = "Unable to refresh oauth token"
self.assertIn(expected_str, error_str)
def test_mail_account_test_view_existing_forbidden_for_other_owner(self) -> None:
    # Testing an existing account owned by a different user must be
    # rejected with a permissions error.
    other_user = User.objects.create_user(
        username="otheruser",
        password="testpassword",
    )
    existing_account = MailAccount.objects.create(
        name="Owned account",
        imap_server="imap.example.com",
        imap_port=993,
        imap_security=MailAccount.ImapSecurity.SSL,
        username="admin",
        password="secret",
        owner=other_user,
    )

    payload = {
        "id": existing_account.id,
        "imap_server": "imap.example.com",
        "imap_port": 993,
        "imap_security": MailAccount.ImapSecurity.SSL,
        "username": "admin",
        "password": "****",
        "is_token": False,
    }
    response = self.client.post(self.url, payload, format="json")

    self.assertEqual(response.status_code, status.HTTP_403_FORBIDDEN)
    self.assertEqual(response.content.decode(), "Insufficient permissions")
def test_mail_account_test_view_requires_add_permission_without_account_id(
    self,
) -> None:
    # Without add_mailaccount permission, testing a brand-new connection
    # (no account id supplied) must be forbidden.
    self.user.user_permissions.remove(
        *Permission.objects.filter(codename__in=["add_mailaccount"]),
    )
    self.user.save()

    payload = {
        "imap_server": "imap.example.com",
        "imap_port": 993,
        "imap_security": MailAccount.ImapSecurity.SSL,
        "username": "admin",
        "password": "secret",
        "is_token": False,
    }
    response = self.client.post(self.url, payload, format="json")

    self.assertEqual(response.status_code, status.HTTP_403_FORBIDDEN)
    self.assertEqual(response.content.decode(), "Insufficient permissions")
class TestMailAccountProcess(APITestCase):
def setUp(self) -> None:

View File

@@ -86,13 +86,34 @@ class MailAccountViewSet(ModelViewSet, PassUserMixin):
request.data["name"] = datetime.datetime.now().isoformat()
serializer = self.get_serializer(data=request.data)
serializer.is_valid(raise_exception=True)
existing_account = None
account_id = request.data.get("id")
# account exists, use the password from there instead of *** and refresh_token / expiration
# testing a new connection requires add permission
if account_id is None and not request.user.has_perms(
["paperless_mail.add_mailaccount"],
):
return HttpResponseForbidden("Insufficient permissions")
# testing an existing account requires change permission on that account
if account_id is not None:
try:
existing_account = MailAccount.objects.get(pk=account_id)
except (TypeError, ValueError, MailAccount.DoesNotExist):
return HttpResponseForbidden("Insufficient permissions")
if not has_perms_owner_aware(
request.user,
"change_mailaccount",
existing_account,
):
return HttpResponseForbidden("Insufficient permissions")
# account exists, use the password from there instead of ***
if (
len(serializer.validated_data.get("password").replace("*", "")) == 0
and request.data["id"] is not None
and existing_account is not None
):
existing_account = MailAccount.objects.get(pk=request.data["id"])
serializer.validated_data["password"] = existing_account.password
serializer.validated_data["account_type"] = existing_account.account_type
serializer.validated_data["refresh_token"] = existing_account.refresh_token
@@ -106,7 +127,8 @@ class MailAccountViewSet(ModelViewSet, PassUserMixin):
) as M:
try:
if (
account.is_token
existing_account is not None
and account.is_token
and account.expiration is not None
and account.expiration < timezone.now()
):
@@ -248,6 +270,7 @@ class OauthCallbackView(GenericAPIView):
imap_server=imap_server,
refresh_token=refresh_token,
expiration=timezone.now() + timedelta(seconds=expires_in),
owner=request.user,
defaults=defaults,
)
return HttpResponseRedirect(

2
uv.lock generated
View File

@@ -3019,7 +3019,7 @@ wheels = [
[[package]]
name = "paperless-ngx"
version = "2.20.7"
version = "2.20.8"
source = { virtual = "." }
dependencies = [
{ name = "azure-ai-documentintelligence", marker = "sys_platform == 'darwin' or sys_platform == 'linux'" },