Mirror of https://github.com/paperless-ngx/paperless-ngx.git (synced 2026-03-07 09:41:22 +00:00)

Compare commits: feature-sh ... feature-fi (9 commits)
| Author | SHA1 | Date |
|---|---|---|
|  | 2eabdcb0ef |  |
|  | ac8bab7ed0 |  |
|  | 3e961eff0e |  |
|  | 91ddda9256 |  |
|  | 9d5e618de8 |  |
|  | 50ae49c7da |  |
|  | ba023ef332 |  |
|  | 7345f2e81c |  |
|  | 731448a8f9 |  |
.github/workflows/ci-docker.yml (vendored, 12 lines changed)
@@ -149,15 +149,16 @@ jobs:
           mkdir -p /tmp/digests
           digest="${{ steps.build.outputs.digest }}"
           echo "digest=${digest}"
-          touch "/tmp/digests/${digest#sha256:}"
+          echo "${digest}" > "/tmp/digests/digest-${{ matrix.arch }}.txt"
       - name: Upload digest
         if: steps.check-push.outputs.should-push == 'true'
         uses: actions/upload-artifact@v7.0.0
         with:
           name: digests-${{ matrix.arch }}
-          path: /tmp/digests/*
+          path: /tmp/digests/digest-${{ matrix.arch }}.txt
           if-no-files-found: error
           retention-days: 1
+          archive: false
   merge-and-push:
     name: Merge and Push Manifest
     runs-on: ubuntu-24.04
@@ -171,7 +172,7 @@ jobs:
         uses: actions/download-artifact@v8.0.0
         with:
           path: /tmp/digests
-          pattern: digests-*
+          pattern: digest-*.txt
           merge-multiple: true
       - name: List digests
         run: |
@@ -217,8 +218,9 @@ jobs:
           tags=$(jq -cr '.tags | map("-t " + .) | join(" ")' <<< "${DOCKER_METADATA_OUTPUT_JSON}")

           digests=""
-          for digest in *; do
-            digests+="${{ env.REGISTRY }}/${REPOSITORY}@sha256:${digest} "
+          for digest_file in digest-*.txt; do
+            digest=$(cat "${digest_file}")
+            digests+="${{ env.REGISTRY }}/${REPOSITORY}@${digest} "
           done

           echo "Creating manifest with tags: ${tags}"
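The digest hand-off above is the crux of this change: instead of encoding each image digest in the name of an empty file, the build job now writes the digest string into a per-arch `digest-<arch>.txt`, and the merge job reads those files back. A minimal Python sketch of the merge-side collection, under the assumption that the files look exactly as the build step writes them (the registry and repository values are illustrative):

```python
from pathlib import Path

# Illustrative values; in the workflow these come from env.REGISTRY and ${REPOSITORY}.
REGISTRY = "ghcr.io"
REPOSITORY = "paperless-ngx/paperless-ngx"


def collect_digest_refs(digest_dir: Path) -> list[str]:
    """Read each per-arch digest file and build the image references that the
    manifest-create command consumes (mirrors the shell loop above)."""
    refs = []
    for digest_file in sorted(digest_dir.glob("digest-*.txt")):
        # Each file stores the full digest string, e.g. "sha256:abc123...",
        # so the old name-mangling ("${digest#sha256:}") is no longer needed.
        digest = digest_file.read_text().strip()
        refs.append(f"{REGISTRY}/{REPOSITORY}@{digest}")
    return refs


print(" ".join(collect_digest_refs(Path("/tmp/digests"))))
```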
.github/workflows/pr-bot.yml (vendored, 17 lines changed)
@@ -2,13 +2,24 @@ name: PR Bot
 on:
   pull_request_target:
     types: [opened]
 permissions:
   contents: read
   pull-requests: write
 jobs:
+  anti-slop:
+    runs-on: ubuntu-latest
+    permissions:
+      contents: read
+      issues: read
+      pull-requests: write
+    steps:
+      - uses: peakoss/anti-slop@v0.2.1
+        with:
+          max-failures: 4
+          failure-add-pr-labels: 'ai'
   pr-bot:
     name: Automated PR Bot
     runs-on: ubuntu-latest
     permissions:
       contents: read
       pull-requests: write
     steps:
       - name: Label PR by file path or branch name
         # see .github/labeler.yml for the labeler config
@@ -1217,7 +1217,7 @@
         </context-group>
         <context-group purpose="location">
           <context context-type="sourcefile">src/app/components/document-detail/document-detail.component.ts</context>
-          <context context-type="linenumber">1756</context>
+          <context context-type="linenumber">1760</context>
         </context-group>
       </trans-unit>
       <trans-unit id="1577733187050997705" datatype="html">
@@ -2090,7 +2090,7 @@
         </context-group>
         <context-group purpose="location">
           <context context-type="sourcefile">src/app/components/document-detail/document-detail.component.ts</context>
-          <context context-type="linenumber">634</context>
+          <context context-type="linenumber">637</context>
         </context-group>
         <context-group purpose="location">
           <context context-type="sourcefile">src/app/components/document-detail/document-version-dropdown/document-version-dropdown.component.html</context>
@@ -2798,11 +2798,11 @@
         </context-group>
         <context-group purpose="location">
           <context context-type="sourcefile">src/app/components/document-detail/document-detail.component.ts</context>
-          <context context-type="linenumber">1376</context>
+          <context context-type="linenumber">1379</context>
         </context-group>
         <context-group purpose="location">
           <context context-type="sourcefile">src/app/components/document-detail/document-detail.component.ts</context>
-          <context context-type="linenumber">1757</context>
+          <context context-type="linenumber">1761</context>
         </context-group>
         <context-group purpose="location">
           <context context-type="sourcefile">src/app/components/document-list/bulk-editor/bulk-editor.component.ts</context>
@@ -3400,7 +3400,7 @@
         </context-group>
         <context-group purpose="location">
           <context context-type="sourcefile">src/app/components/document-detail/document-detail.component.ts</context>
-          <context context-type="linenumber">1329</context>
+          <context context-type="linenumber">1332</context>
         </context-group>
         <context-group purpose="location">
           <context context-type="sourcefile">src/app/components/document-list/bulk-editor/bulk-editor.component.ts</context>
@@ -3505,7 +3505,7 @@
         </context-group>
         <context-group purpose="location">
           <context context-type="sourcefile">src/app/components/document-detail/document-detail.component.ts</context>
-          <context context-type="linenumber">1808</context>
+          <context context-type="linenumber">1814</context>
         </context-group>
       </trans-unit>
       <trans-unit id="6661109599266152398" datatype="html">
@@ -3516,7 +3516,7 @@
         </context-group>
         <context-group purpose="location">
           <context context-type="sourcefile">src/app/components/document-detail/document-detail.component.ts</context>
-          <context context-type="linenumber">1809</context>
+          <context context-type="linenumber">1815</context>
         </context-group>
       </trans-unit>
       <trans-unit id="5162686434580248853" datatype="html">
@@ -3527,7 +3527,7 @@
         </context-group>
         <context-group purpose="location">
           <context context-type="sourcefile">src/app/components/document-detail/document-detail.component.ts</context>
-          <context context-type="linenumber">1810</context>
+          <context context-type="linenumber">1816</context>
         </context-group>
       </trans-unit>
       <trans-unit id="8157388568390631653" datatype="html">
@@ -5488,7 +5488,7 @@
         </context-group>
         <context-group purpose="location">
           <context context-type="sourcefile">src/app/components/document-detail/document-detail.component.ts</context>
-          <context context-type="linenumber">1333</context>
+          <context context-type="linenumber">1336</context>
         </context-group>
         <context-group purpose="location">
           <context context-type="sourcefile">src/app/components/document-list/bulk-editor/bulk-editor.component.ts</context>
@@ -7695,81 +7695,81 @@
         <source>Error retrieving metadata</source>
         <context-group purpose="location">
           <context context-type="sourcefile">src/app/components/document-detail/document-detail.component.ts</context>
-          <context context-type="linenumber">408</context>
+          <context context-type="linenumber">411</context>
         </context-group>
       </trans-unit>
       <trans-unit id="2218903673684131427" datatype="html">
         <source>An error occurred loading content: <x id="PH" equiv-text="err.message ?? err.toString()"/></source>
         <context-group purpose="location">
           <context context-type="sourcefile">src/app/components/document-detail/document-detail.component.ts</context>
-          <context context-type="linenumber">509,511</context>
+          <context context-type="linenumber">512,514</context>
         </context-group>
         <context-group purpose="location">
           <context context-type="sourcefile">src/app/components/document-detail/document-detail.component.ts</context>
-          <context context-type="linenumber">956,958</context>
+          <context context-type="linenumber">959,961</context>
         </context-group>
       </trans-unit>
       <trans-unit id="6357361810318120957" datatype="html">
         <source>Document was updated</source>
         <context-group purpose="location">
           <context context-type="sourcefile">src/app/components/document-detail/document-detail.component.ts</context>
-          <context context-type="linenumber">629</context>
+          <context context-type="linenumber">632</context>
         </context-group>
       </trans-unit>
       <trans-unit id="5154064822428631306" datatype="html">
         <source>Document was updated at <x id="PH" equiv-text="formattedModified"/>.</source>
         <context-group purpose="location">
           <context context-type="sourcefile">src/app/components/document-detail/document-detail.component.ts</context>
-          <context context-type="linenumber">630</context>
+          <context context-type="linenumber">633</context>
         </context-group>
       </trans-unit>
       <trans-unit id="8462497568316256794" datatype="html">
         <source>Reload to discard your local unsaved edits and load the latest remote version.</source>
         <context-group purpose="location">
           <context context-type="sourcefile">src/app/components/document-detail/document-detail.component.ts</context>
-          <context context-type="linenumber">631</context>
+          <context context-type="linenumber">634</context>
         </context-group>
       </trans-unit>
       <trans-unit id="7967484035994732534" datatype="html">
         <source>Reload</source>
         <context-group purpose="location">
           <context context-type="sourcefile">src/app/components/document-detail/document-detail.component.ts</context>
-          <context context-type="linenumber">633</context>
+          <context context-type="linenumber">636</context>
         </context-group>
       </trans-unit>
       <trans-unit id="2907037627372942104" datatype="html">
         <source>Document reloaded with latest changes.</source>
         <context-group purpose="location">
           <context context-type="sourcefile">src/app/components/document-detail/document-detail.component.ts</context>
-          <context context-type="linenumber">689</context>
+          <context context-type="linenumber">692</context>
         </context-group>
       </trans-unit>
       <trans-unit id="6435639868943916539" datatype="html">
         <source>Document reloaded.</source>
         <context-group purpose="location">
           <context context-type="sourcefile">src/app/components/document-detail/document-detail.component.ts</context>
-          <context context-type="linenumber">700</context>
+          <context context-type="linenumber">703</context>
         </context-group>
       </trans-unit>
       <trans-unit id="6142395741265832184" datatype="html">
         <source>Next document</source>
         <context-group purpose="location">
           <context context-type="sourcefile">src/app/components/document-detail/document-detail.component.ts</context>
-          <context context-type="linenumber">802</context>
+          <context context-type="linenumber">805</context>
         </context-group>
       </trans-unit>
       <trans-unit id="651985345816518480" datatype="html">
         <source>Previous document</source>
         <context-group purpose="location">
           <context context-type="sourcefile">src/app/components/document-detail/document-detail.component.ts</context>
-          <context context-type="linenumber">812</context>
+          <context context-type="linenumber">815</context>
         </context-group>
       </trans-unit>
       <trans-unit id="2885986061416655600" datatype="html">
         <source>Close document</source>
         <context-group purpose="location">
           <context context-type="sourcefile">src/app/components/document-detail/document-detail.component.ts</context>
-          <context context-type="linenumber">820</context>
+          <context context-type="linenumber">823</context>
         </context-group>
         <context-group purpose="location">
           <context context-type="sourcefile">src/app/services/open-documents.service.ts</context>
@@ -7780,67 +7780,67 @@
         <source>Save document</source>
         <context-group purpose="location">
           <context context-type="sourcefile">src/app/components/document-detail/document-detail.component.ts</context>
-          <context context-type="linenumber">827</context>
+          <context context-type="linenumber">830</context>
         </context-group>
       </trans-unit>
       <trans-unit id="1784543155727940353" datatype="html">
         <source>Save and close / next</source>
         <context-group purpose="location">
           <context context-type="sourcefile">src/app/components/document-detail/document-detail.component.ts</context>
-          <context context-type="linenumber">836</context>
+          <context context-type="linenumber">839</context>
         </context-group>
       </trans-unit>
       <trans-unit id="7427704425579737895" datatype="html">
         <source>Error retrieving version content</source>
         <context-group purpose="location">
           <context context-type="sourcefile">src/app/components/document-detail/document-detail.component.ts</context>
-          <context context-type="linenumber">940</context>
+          <context context-type="linenumber">943</context>
         </context-group>
       </trans-unit>
       <trans-unit id="3456881259945295697" datatype="html">
         <source>Error retrieving suggestions.</source>
         <context-group purpose="location">
           <context context-type="sourcefile">src/app/components/document-detail/document-detail.component.ts</context>
-          <context context-type="linenumber">997</context>
+          <context context-type="linenumber">1000</context>
         </context-group>
       </trans-unit>
       <trans-unit id="2194092841814123758" datatype="html">
         <source>Document "<x id="PH" equiv-text="newValues.title"/>" saved successfully.</source>
         <context-group purpose="location">
           <context context-type="sourcefile">src/app/components/document-detail/document-detail.component.ts</context>
-          <context context-type="linenumber">1209</context>
+          <context context-type="linenumber">1212</context>
         </context-group>
         <context-group purpose="location">
           <context context-type="sourcefile">src/app/components/document-detail/document-detail.component.ts</context>
-          <context context-type="linenumber">1236</context>
+          <context context-type="linenumber">1239</context>
         </context-group>
       </trans-unit>
       <trans-unit id="6626387786259219838" datatype="html">
         <source>Error saving document "<x id="PH" equiv-text="this.document.title"/>"</source>
         <context-group purpose="location">
           <context context-type="sourcefile">src/app/components/document-detail/document-detail.component.ts</context>
-          <context context-type="linenumber">1242</context>
+          <context context-type="linenumber">1245</context>
         </context-group>
       </trans-unit>
       <trans-unit id="448882439049417053" datatype="html">
         <source>Error saving document</source>
         <context-group purpose="location">
           <context context-type="sourcefile">src/app/components/document-detail/document-detail.component.ts</context>
-          <context context-type="linenumber">1297</context>
+          <context context-type="linenumber">1300</context>
         </context-group>
       </trans-unit>
       <trans-unit id="8410796510716511826" datatype="html">
         <source>Do you really want to move the document "<x id="PH" equiv-text="this.document.title"/>" to the trash?</source>
         <context-group purpose="location">
           <context context-type="sourcefile">src/app/components/document-detail/document-detail.component.ts</context>
-          <context context-type="linenumber">1330</context>
+          <context context-type="linenumber">1333</context>
         </context-group>
       </trans-unit>
       <trans-unit id="282586936710748252" datatype="html">
         <source>Documents can be restored prior to permanent deletion.</source>
         <context-group purpose="location">
           <context context-type="sourcefile">src/app/components/document-detail/document-detail.component.ts</context>
-          <context context-type="linenumber">1331</context>
+          <context context-type="linenumber">1334</context>
         </context-group>
         <context-group purpose="location">
           <context context-type="sourcefile">src/app/components/document-list/bulk-editor/bulk-editor.component.ts</context>
@@ -7851,14 +7851,14 @@
         <source>Error deleting document</source>
         <context-group purpose="location">
           <context context-type="sourcefile">src/app/components/document-detail/document-detail.component.ts</context>
-          <context context-type="linenumber">1352</context>
+          <context context-type="linenumber">1355</context>
         </context-group>
       </trans-unit>
       <trans-unit id="619486176823357521" datatype="html">
         <source>Reprocess confirm</source>
         <context-group purpose="location">
           <context context-type="sourcefile">src/app/components/document-detail/document-detail.component.ts</context>
-          <context context-type="linenumber">1372</context>
+          <context context-type="linenumber">1375</context>
         </context-group>
         <context-group purpose="location">
           <context context-type="sourcefile">src/app/components/document-list/bulk-editor/bulk-editor.component.ts</context>
@@ -7869,102 +7869,102 @@
         <source>This operation will permanently recreate the archive file for this document.</source>
         <context-group purpose="location">
           <context context-type="sourcefile">src/app/components/document-detail/document-detail.component.ts</context>
-          <context context-type="linenumber">1373</context>
+          <context context-type="linenumber">1376</context>
         </context-group>
       </trans-unit>
       <trans-unit id="302054111564709516" datatype="html">
         <source>The archive file will be re-generated with the current settings.</source>
         <context-group purpose="location">
           <context context-type="sourcefile">src/app/components/document-detail/document-detail.component.ts</context>
-          <context context-type="linenumber">1374</context>
+          <context context-type="linenumber">1377</context>
         </context-group>
       </trans-unit>
       <trans-unit id="4700389117298802932" datatype="html">
         <source>Reprocess operation for "<x id="PH" equiv-text="this.document.title"/>" will begin in the background.</source>
         <context-group purpose="location">
           <context context-type="sourcefile">src/app/components/document-detail/document-detail.component.ts</context>
-          <context context-type="linenumber">1384</context>
+          <context context-type="linenumber">1387</context>
         </context-group>
       </trans-unit>
       <trans-unit id="4409560272830824468" datatype="html">
         <source>Error executing operation</source>
         <context-group purpose="location">
           <context context-type="sourcefile">src/app/components/document-detail/document-detail.component.ts</context>
-          <context context-type="linenumber">1395</context>
+          <context context-type="linenumber">1398</context>
         </context-group>
       </trans-unit>
       <trans-unit id="6030453331794586802" datatype="html">
         <source>Error downloading document</source>
         <context-group purpose="location">
           <context context-type="sourcefile">src/app/components/document-detail/document-detail.component.ts</context>
-          <context context-type="linenumber">1458</context>
+          <context context-type="linenumber">1461</context>
         </context-group>
       </trans-unit>
       <trans-unit id="4458954481601077369" datatype="html">
         <source>Page Fit</source>
         <context-group purpose="location">
           <context context-type="sourcefile">src/app/components/document-detail/document-detail.component.ts</context>
-          <context context-type="linenumber">1538</context>
+          <context context-type="linenumber">1541</context>
         </context-group>
       </trans-unit>
       <trans-unit id="4663705961777238777" datatype="html">
         <source>PDF edit operation for "<x id="PH" equiv-text="this.document.title"/>" will begin in the background.</source>
         <context-group purpose="location">
           <context context-type="sourcefile">src/app/components/document-detail/document-detail.component.ts</context>
-          <context context-type="linenumber">1775</context>
+          <context context-type="linenumber">1781</context>
         </context-group>
       </trans-unit>
       <trans-unit id="9043972994040261999" datatype="html">
         <source>Error executing PDF edit operation</source>
         <context-group purpose="location">
           <context context-type="sourcefile">src/app/components/document-detail/document-detail.component.ts</context>
-          <context context-type="linenumber">1787</context>
+          <context context-type="linenumber">1793</context>
         </context-group>
       </trans-unit>
       <trans-unit id="6172690334763056188" datatype="html">
         <source>Please enter the current password before attempting to remove it.</source>
         <context-group purpose="location">
           <context context-type="sourcefile">src/app/components/document-detail/document-detail.component.ts</context>
-          <context context-type="linenumber">1798</context>
+          <context context-type="linenumber">1804</context>
         </context-group>
       </trans-unit>
       <trans-unit id="968660764814228922" datatype="html">
         <source>Password removal operation for "<x id="PH" equiv-text="this.document.title"/>" will begin in the background.</source>
         <context-group purpose="location">
           <context context-type="sourcefile">src/app/components/document-detail/document-detail.component.ts</context>
-          <context context-type="linenumber">1830</context>
+          <context context-type="linenumber">1838</context>
         </context-group>
       </trans-unit>
       <trans-unit id="2282118435712883014" datatype="html">
         <source>Error executing password removal operation</source>
         <context-group purpose="location">
           <context context-type="sourcefile">src/app/components/document-detail/document-detail.component.ts</context>
-          <context context-type="linenumber">1844</context>
+          <context context-type="linenumber">1852</context>
         </context-group>
       </trans-unit>
       <trans-unit id="3740891324955700797" datatype="html">
         <source>Print failed.</source>
         <context-group purpose="location">
           <context context-type="sourcefile">src/app/components/document-detail/document-detail.component.ts</context>
-          <context context-type="linenumber">1883</context>
+          <context context-type="linenumber">1891</context>
         </context-group>
       </trans-unit>
       <trans-unit id="6457245677384603573" datatype="html">
         <source>Error loading document for printing.</source>
         <context-group purpose="location">
           <context context-type="sourcefile">src/app/components/document-detail/document-detail.component.ts</context>
-          <context context-type="linenumber">1895</context>
+          <context context-type="linenumber">1903</context>
         </context-group>
       </trans-unit>
       <trans-unit id="6085793215710522488" datatype="html">
         <source>An error occurred loading tiff: <x id="PH" equiv-text="err.toString()"/></source>
         <context-group purpose="location">
           <context context-type="sourcefile">src/app/components/document-detail/document-detail.component.ts</context>
-          <context context-type="linenumber">1960</context>
+          <context context-type="linenumber">1968</context>
         </context-group>
         <context-group purpose="location">
           <context context-type="sourcefile">src/app/components/document-detail/document-detail.component.ts</context>
-          <context context-type="linenumber">1964</context>
+          <context context-type="linenumber">1972</context>
         </context-group>
       </trans-unit>
       <trans-unit id="4958946940233632319" datatype="html">
@@ -3,6 +3,7 @@ import { provideHttpClientTesting } from '@angular/common/http/testing'
 import { ComponentFixture, TestBed } from '@angular/core/testing'
 import { NgbActiveModal } from '@ng-bootstrap/ng-bootstrap'
 import { NgxBootstrapIconsModule, allIcons } from 'ngx-bootstrap-icons'
+import { DocumentService } from 'src/app/services/rest/document.service'
 import { PDFEditorComponent } from './pdf-editor.component'

 describe('PDFEditorComponent', () => {
@@ -139,4 +140,16 @@ describe('PDFEditorComponent', () => {
     expect(component.pages[1].page).toBe(2)
     expect(component.pages[2].page).toBe(3)
   })
+
+  it('should include selected version in preview source when provided', () => {
+    const documentService = TestBed.inject(DocumentService)
+    const previewSpy = jest
+      .spyOn(documentService, 'getPreviewUrl')
+      .mockReturnValue('preview-version')
+    component.documentID = 3
+    component.versionID = 10
+
+    expect(component.pdfSrc).toBe('preview-version')
+    expect(previewSpy).toHaveBeenCalledWith(3, false, 10)
+  })
 })
@@ -46,6 +46,7 @@ export class PDFEditorComponent extends ConfirmDialogComponent {
   activeModal: NgbActiveModal = inject(NgbActiveModal)

   documentID: number
+  versionID?: number
   pages: PageOperation[] = []
   totalPages = 0
   editMode: PdfEditorEditMode = this.settingsService.get(
@@ -55,7 +56,11 @@
   includeMetadata: boolean = true

   get pdfSrc(): string {
-    return this.documentService.getPreviewUrl(this.documentID)
+    return this.documentService.getPreviewUrl(
+      this.documentID,
+      false,
+      this.versionID
+    )
   }

   pdfLoaded(pdf: PngxPdfDocumentProxy) {
@@ -1661,22 +1661,25 @@ describe('DocumentDetailComponent', () => {
     const closeSpy = jest.spyOn(openDocumentsService, 'closeDocument')
     const errorSpy = jest.spyOn(toastService, 'showError')
     initNormally()
+    component.selectedVersionId = 10
     component.editPdf()
     expect(modal).not.toBeUndefined()
     modal.componentInstance.documentID = doc.id
+    expect(modal.componentInstance.versionID).toBe(10)
     modal.componentInstance.pages = [{ page: 1, rotate: 0, splitAfter: false }]
     modal.componentInstance.confirm()
     let req = httpTestingController.expectOne(
       `${environment.apiBaseUrl}documents/bulk_edit/`
     )
     expect(req.request.body).toEqual({
-      documents: [doc.id],
+      documents: [10],
       method: 'edit_pdf',
       parameters: {
         operations: [{ page: 1, rotate: 0, doc: 0 }],
         delete_original: false,
         update_document: false,
         include_metadata: true,
+        source_mode: 'explicit_selection',
       },
     })
     req.error(new ErrorEvent('failed'))
@@ -1698,6 +1701,7 @@ describe('DocumentDetailComponent', () => {
     let modal: NgbModalRef
     modalService.activeInstances.subscribe((m) => (modal = m[0]))
     initNormally()
+    component.selectedVersionId = 10
     component.password = 'secret'
     component.removePassword()
     const dialog =
@@ -1710,13 +1714,14 @@ describe('DocumentDetailComponent', () => {
       `${environment.apiBaseUrl}documents/bulk_edit/`
     )
     expect(req.request.body).toEqual({
-      documents: [doc.id],
+      documents: [10],
       method: 'remove_password',
       parameters: {
         password: 'secret',
         update_document: false,
         include_metadata: false,
         delete_original: true,
+        source_mode: 'explicit_selection',
       },
     })
     req.flush(true)
@@ -74,7 +74,10 @@ import {
 import { CorrespondentService } from 'src/app/services/rest/correspondent.service'
 import { CustomFieldsService } from 'src/app/services/rest/custom-fields.service'
 import { DocumentTypeService } from 'src/app/services/rest/document-type.service'
-import { DocumentService } from 'src/app/services/rest/document.service'
+import {
+  BulkEditSourceMode,
+  DocumentService,
+} from 'src/app/services/rest/document.service'
 import { SavedViewService } from 'src/app/services/rest/saved-view.service'
 import { StoragePathService } from 'src/app/services/rest/storage-path.service'
 import { TagService } from 'src/app/services/rest/tag.service'
@@ -1753,20 +1756,23 @@ export class DocumentDetailComponent
       size: 'xl',
       scrollable: true,
     })
+    const sourceDocumentId = this.selectedVersionId ?? this.document.id
     modal.componentInstance.title = $localize`PDF Editor`
     modal.componentInstance.btnCaption = $localize`Proceed`
     modal.componentInstance.documentID = this.document.id
+    modal.componentInstance.versionID = sourceDocumentId
     modal.componentInstance.confirmClicked
       .pipe(takeUntil(this.unsubscribeNotifier))
       .subscribe(() => {
         modal.componentInstance.buttonsEnabled = false
         this.documentsService
-          .bulkEdit([this.document.id], 'edit_pdf', {
+          .bulkEdit([sourceDocumentId], 'edit_pdf', {
             operations: modal.componentInstance.getOperations(),
             delete_original: modal.componentInstance.deleteOriginal,
             update_document:
               modal.componentInstance.editMode == PdfEditorEditMode.Update,
             include_metadata: modal.componentInstance.includeMetadata,
+            source_mode: BulkEditSourceMode.EXPLICIT_SELECTION,
           })
           .pipe(first(), takeUntil(this.unsubscribeNotifier))
           .subscribe({
@@ -1812,16 +1818,18 @@ export class DocumentDetailComponent
     modal.componentInstance.confirmClicked
       .pipe(takeUntil(this.unsubscribeNotifier))
       .subscribe(() => {
+        const sourceDocumentId = this.selectedVersionId ?? this.document.id
         const dialog =
           modal.componentInstance as PasswordRemovalConfirmDialogComponent
         dialog.buttonsEnabled = false
         this.networkActive = true
         this.documentsService
-          .bulkEdit([this.document.id], 'remove_password', {
+          .bulkEdit([sourceDocumentId], 'remove_password', {
            password: this.password,
            update_document: dialog.updateDocument,
            include_metadata: dialog.includeMetadata,
            delete_original: dialog.deleteOriginal,
+            source_mode: BulkEditSourceMode.EXPLICIT_SELECTION,
          })
          .pipe(first(), takeUntil(this.unsubscribeNotifier))
          .subscribe({
@@ -37,6 +37,11 @@ export interface SelectionData {
   selected_custom_fields: SelectionDataItem[]
 }

+export enum BulkEditSourceMode {
+  LATEST_VERSION = 'latest_version',
+  EXPLICIT_SELECTION = 'explicit_selection',
+}
+
 @Injectable({
   providedIn: 'root',
 })
@@ -29,12 +29,21 @@ from documents.plugins.helpers import DocumentsStatusManager
 from documents.tasks import bulk_update_documents
 from documents.tasks import consume_file
 from documents.tasks import update_document_content_maybe_archive_file
+from documents.versioning import get_latest_version_for_root
+from documents.versioning import get_root_document

 if TYPE_CHECKING:
     from django.contrib.auth.models import User

 logger: logging.Logger = logging.getLogger("paperless.bulk_edit")

+SourceMode = Literal["latest_version", "explicit_selection"]
+
+
+class SourceModeChoices:
+    LATEST_VERSION: SourceMode = "latest_version"
+    EXPLICIT_SELECTION: SourceMode = "explicit_selection"
+

 @shared_task(bind=True)
 def restore_archive_serial_numbers_task(
@@ -72,46 +81,21 @@ def restore_archive_serial_numbers(backup: dict[int, int | None]) -> None:
     logger.info(f"Restored archive serial numbers for documents {list(backup.keys())}")


-def _get_root_ids_by_doc_id(doc_ids: list[int]) -> dict[int, int]:
-    """
-    Resolve each provided document id to its root document id.
-
-    - If the id is already a root document: root id is itself.
-    - If the id is a version document: root id is its `root_document_id`.
-    """
-    qs = Document.objects.filter(id__in=doc_ids).only("id", "root_document_id")
-    return {doc.id: doc.root_document_id or doc.id for doc in qs}
-
-
-def _get_root_and_current_docs_by_root_id(
-    root_ids: set[int],
-) -> tuple[dict[int, Document], dict[int, Document]]:
-    """
-    Returns:
-        - root_docs: root_id -> root Document
-        - current_docs: root_id -> newest version Document (or root if none)
-    """
-    root_docs = {
-        doc.id: doc
-        for doc in Document.objects.filter(id__in=root_ids).select_related(
-            "owner",
-        )
-    }
-    latest_versions_by_root_id: dict[int, Document] = {}
-    for version_doc in Document.objects.filter(root_document_id__in=root_ids).order_by(
-        "root_document_id",
-        "-id",
-    ):
-        root_id = version_doc.root_document_id
-        if root_id is None:
-            continue
-        latest_versions_by_root_id.setdefault(root_id, version_doc)
-
-    current_docs: dict[int, Document] = {
-        root_id: latest_versions_by_root_id.get(root_id, root_docs[root_id])
-        for root_id in root_docs
-    }
-    return root_docs, current_docs
+def _resolve_root_and_source_doc(
+    doc: Document,
+    *,
+    source_mode: SourceMode = SourceModeChoices.LATEST_VERSION,
+) -> tuple[Document, Document]:
+    root_doc = get_root_document(doc)
+
+    if source_mode == SourceModeChoices.EXPLICIT_SELECTION:
+        return root_doc, doc
+
+    # Version IDs are explicit by default, only a selected root resolves to latest
+    if doc.root_document_id is not None:
+        return root_doc, doc
+
+    return root_doc, get_latest_version_for_root(root_doc)


 def set_correspondent(
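The replacement helper `_resolve_root_and_source_doc` concentrates the versioning rule the rest of this diff relies on: an explicit selection is always honored, a version id is treated as explicit even in the default mode, and only a bare root id is re-resolved to the newest version. A self-contained sketch of just that decision, using plain stand-in objects rather than the Django models (the `root_document_id` wiring here is illustrative):

```python
from dataclasses import dataclass


@dataclass
class Doc:
    id: int
    root_document_id: int | None = None  # None means this doc is itself a root


def resolve_source(doc: Doc, latest: Doc, source_mode: str = "latest_version") -> Doc:
    """Stand-in for _resolve_root_and_source_doc: decide whose file bytes to edit."""
    if source_mode == "explicit_selection":
        return doc  # the caller picked this exact document; honor it
    if doc.root_document_id is not None:
        return doc  # a version id is already explicit, even in latest_version mode
    return latest  # only a bare root id resolves to the newest version


root = Doc(id=1)
v2 = Doc(id=10, root_document_id=1)  # newest version of root 1
assert resolve_source(root, latest=v2) is v2  # root id -> latest version
assert resolve_source(v2, latest=v2) is v2    # version id stays explicit
assert resolve_source(root, latest=v2, source_mode="explicit_selection") is root
```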
@@ -421,21 +405,32 @@ def rotate(
     doc_ids: list[int],
     degrees: int,
     *,
+    source_mode: SourceMode = SourceModeChoices.LATEST_VERSION,
     user: User | None = None,
 ) -> Literal["OK"]:
     logger.info(
         f"Attempting to rotate {len(doc_ids)} documents by {degrees} degrees.",
     )
-    doc_to_root_id = _get_root_ids_by_doc_id(doc_ids)
-    root_ids = set(doc_to_root_id.values())
-    root_docs_by_id, current_docs_by_root_id = _get_root_and_current_docs_by_root_id(
-        root_ids,
-    )
+    docs_by_id = {
+        doc.id: doc
+        for doc in Document.objects.select_related("root_document").filter(
+            id__in=doc_ids,
+        )
+    }
+    docs_by_root_id: dict[int, tuple[Document, Document]] = {}
+    for doc_id in doc_ids:
+        doc = docs_by_id.get(doc_id)
+        if doc is None:
+            continue
+        root_doc, source_doc = _resolve_root_and_source_doc(
+            doc,
+            source_mode=source_mode,
+        )
+        docs_by_root_id.setdefault(root_doc.id, (root_doc, source_doc))

     import pikepdf

-    for root_id in root_ids:
-        root_doc = root_docs_by_id[root_id]
-        source_doc = current_docs_by_root_id[root_id]
+    for root_doc, source_doc in docs_by_root_id.values():
         if source_doc.mime_type != "application/pdf":
             logger.warning(
                 f"Document {root_doc.id} is not a PDF, skipping rotation.",
@@ -481,12 +476,14 @@ def merge(
     metadata_document_id: int | None = None,
     delete_originals: bool = False,
     archive_fallback: bool = False,
+    source_mode: SourceMode = SourceModeChoices.LATEST_VERSION,
     user: User | None = None,
 ) -> Literal["OK"]:
     logger.info(
         f"Attempting to merge {len(doc_ids)} documents into a single document.",
     )
-    qs = Document.objects.filter(id__in=doc_ids)
+    qs = Document.objects.select_related("root_document").filter(id__in=doc_ids)
+    docs_by_id = {doc.id: doc for doc in qs}
     affected_docs: list[int] = []
     import pikepdf

@@ -495,14 +492,20 @@ def merge(
     handoff_asn: int | None = None
     # use doc_ids to preserve order
     for doc_id in doc_ids:
-        doc = qs.get(id=doc_id)
+        doc = docs_by_id.get(doc_id)
+        if doc is None:
+            continue
+        _, source_doc = _resolve_root_and_source_doc(
+            doc,
+            source_mode=source_mode,
+        )
         try:
             doc_path = (
-                doc.archive_path
+                source_doc.archive_path
                 if archive_fallback
-                and doc.mime_type != "application/pdf"
-                and doc.has_archive_version
-                else doc.source_path
+                and source_doc.mime_type != "application/pdf"
+                and source_doc.has_archive_version
+                else source_doc.source_path
             )
             with pikepdf.open(str(doc_path)) as pdf:
                 version = max(version, pdf.pdf_version)
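A side effect of the `merge` change worth noting: the old loop called `qs.get(id=doc_id)` once per document, while the new code fetches everything in one query and walks `doc_ids` through a dict, so the user's merge order is preserved and missing ids are skipped rather than raising. The pattern in isolation (the toy `fetch_all` stands in for the single `filter(id__in=...)` query):

```python
from dataclasses import dataclass


@dataclass
class Doc:
    id: int


def fetch_all(ids):
    """Stands in for Document.objects.filter(id__in=ids): one query, any order."""
    db = {1: Doc(1), 2: Doc(2), 3: Doc(3)}
    return [db[i] for i in ids if i in db]


def iter_in_selection_order(doc_ids):
    """One bulk fetch plus dict lookups replaces a per-id .get() (N queries -> 1)."""
    docs_by_id = {doc.id: doc for doc in fetch_all(doc_ids)}
    for doc_id in doc_ids:  # iteration order == the user's selection order
        doc = docs_by_id.get(doc_id)
        if doc is None:  # an id that vanished is skipped, not fatal
            continue
        yield doc


assert [d.id for d in iter_in_selection_order([3, 99, 1])] == [3, 1]
```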
@@ -584,18 +587,23 @@ def split(
     pages: list[list[int]],
     *,
     delete_originals: bool = False,
+    source_mode: SourceMode = SourceModeChoices.LATEST_VERSION,
     user: User | None = None,
 ) -> Literal["OK"]:
     logger.info(
         f"Attempting to split document {doc_ids[0]} into {len(pages)} documents",
     )
-    doc = Document.objects.get(id=doc_ids[0])
+    doc = Document.objects.select_related("root_document").get(id=doc_ids[0])
+    _, source_doc = _resolve_root_and_source_doc(
+        doc,
+        source_mode=source_mode,
+    )
     import pikepdf

     consume_tasks = []

     try:
-        with pikepdf.open(doc.source_path) as pdf:
+        with pikepdf.open(source_doc.source_path) as pdf:
             for idx, split_doc in enumerate(pages):
                 dst: pikepdf.Pdf = pikepdf.new()
                 for page in split_doc:
@@ -659,25 +667,17 @@ def delete_pages(
     doc_ids: list[int],
     pages: list[int],
     *,
+    source_mode: SourceMode = SourceModeChoices.LATEST_VERSION,
     user: User | None = None,
 ) -> Literal["OK"]:
     logger.info(
         f"Attempting to delete pages {pages} from {len(doc_ids)} documents",
     )
     doc = Document.objects.select_related("root_document").get(id=doc_ids[0])
-    root_doc: Document
-    if doc.root_document_id is None or doc.root_document is None:
-        root_doc = doc
-    else:
-        root_doc = doc.root_document
-
-    source_doc = (
-        Document.objects.filter(Q(id=root_doc.id) | Q(root_document=root_doc))
-        .order_by("-id")
-        .first()
+    root_doc, source_doc = _resolve_root_and_source_doc(
+        doc,
+        source_mode=source_mode,
     )
-    if source_doc is None:
-        source_doc = root_doc
     pages = sorted(pages)  # sort pages to avoid index issues
     import pikepdf

@@ -722,6 +722,7 @@ def edit_pdf(
     delete_original: bool = False,
     update_document: bool = False,
     include_metadata: bool = True,
+    source_mode: SourceMode = SourceModeChoices.LATEST_VERSION,
     user: User | None = None,
 ) -> Literal["OK"]:
     """
@@ -736,19 +737,10 @@ def edit_pdf(
         f"Editing PDF of document {doc_ids[0]} with {len(operations)} operations",
     )
     doc = Document.objects.select_related("root_document").get(id=doc_ids[0])
-    root_doc: Document
-    if doc.root_document_id is None or doc.root_document is None:
-        root_doc = doc
-    else:
-        root_doc = doc.root_document
-
-    source_doc = (
-        Document.objects.filter(Q(id=root_doc.id) | Q(root_document=root_doc))
-        .order_by("-id")
-        .first()
+    root_doc, source_doc = _resolve_root_and_source_doc(
+        doc,
+        source_mode=source_mode,
     )
-    if source_doc is None:
-        source_doc = root_doc
     import pikepdf

     pdf_docs: list[pikepdf.Pdf] = []
@@ -859,6 +851,7 @@ def remove_password(
     update_document: bool = False,
     delete_original: bool = False,
     include_metadata: bool = True,
+    source_mode: SourceMode = SourceModeChoices.LATEST_VERSION,
     user: User | None = None,
 ) -> Literal["OK"]:
     """
@@ -868,19 +861,10 @@ def remove_password(

     for doc_id in doc_ids:
         doc = Document.objects.select_related("root_document").get(id=doc_id)
-        root_doc: Document
-        if doc.root_document_id is None or doc.root_document is None:
-            root_doc = doc
-        else:
-            root_doc = doc.root_document
-
-        source_doc = (
-            Document.objects.filter(Q(id=root_doc.id) | Q(root_document=root_doc))
-            .order_by("-id")
-            .first()
+        root_doc, source_doc = _resolve_root_and_source_doc(
+            doc,
+            source_mode=source_mode,
         )
-        if source_doc is None:
-            source_doc = root_doc
         try:
             logger.info(
                 f"Attempting password removal from document {doc_ids[0]}",
@@ -1,4 +1,5 @@
 import datetime
+import hashlib
 import os
 import tempfile
 from enum import StrEnum
@@ -47,7 +48,6 @@ from documents.signals import document_consumption_started
 from documents.signals import document_updated
 from documents.signals.handlers import run_workflows
 from documents.templating.workflows import parse_w_workflow_placeholders
-from documents.utils import compute_checksum
 from documents.utils import copy_basic_file_stats
 from documents.utils import copy_file_with_basic_stats
 from documents.utils import run_subprocess
@@ -196,7 +196,9 @@ class ConsumerPlugin(
         version_doc = Document(
             root_document=root_doc_frozen,
             version_index=next_version_index + 1,
-            checksum=compute_checksum(file_for_checksum),
+            checksum=hashlib.md5(
+                file_for_checksum.read_bytes(),
+            ).hexdigest(),
             content=text or "",
             page_count=page_count,
             mime_type=mime_type,
@@ -654,9 +656,10 @@ class ConsumerPlugin(
                 document.archive_path,
             )

-            document.archive_checksum = compute_checksum(
-                Path(archive_path),
-            )
+            with Path(archive_path).open("rb") as f:
+                document.archive_checksum = hashlib.md5(
+                    f.read(),
+                ).hexdigest()

             # Don't save with the lock active. Saving will cause the file
             # renaming logic to acquire the lock as well.
@@ -797,7 +800,7 @@ class ConsumerPlugin(
             title=title[:127],
             content=text,
             mime_type=mime_type,
-            checksum=compute_checksum(file_for_checksum),
+            checksum=hashlib.md5(file_for_checksum.read_bytes()).hexdigest(),
             created=create_date,
             modified=create_date,
             page_count=page_count,
@@ -914,9 +917,10 @@ class ConsumerPreflightPlugin(

     def pre_check_duplicate(self) -> None:
         """
-        Using the SHA256 of the file, check this exact file doesn't already exist
+        Using the MD5 of the file, check this exact file doesn't already exist
        """
-        checksum = compute_checksum(Path(self.input_doc.original_file))
+        with Path(self.input_doc.original_file).open("rb") as f:
+            checksum = hashlib.md5(f.read()).hexdigest()
         existing_doc = Document.global_objects.filter(
             Q(checksum=checksum) | Q(archive_checksum=checksum),
         )
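Note that the new MD5 call sites read whole files into memory (`read_bytes()` / `f.read()`). That is fine for typical documents, but where memory pressure matters a chunked variant produces the identical digest; a sketch, not something this diff itself does:

```python
import hashlib
from pathlib import Path


def md5_chunked(path: Path, chunk_size: int = 65536) -> str:
    """Same result as hashlib.md5(path.read_bytes()).hexdigest(), but the file
    is hashed in 64 KiB chunks so memory use stays flat for large originals."""
    digest = hashlib.md5()
    with path.open("rb") as fh:
        while chunk := fh.read(chunk_size):
            digest.update(chunk)
    return digest.hexdigest()
```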
@@ -3,7 +3,6 @@ import json
 import os
 import shutil
 import tempfile
-from itertools import chain
 from itertools import islice
 from pathlib import Path
 from typing import TYPE_CHECKING
@@ -58,7 +57,6 @@ from documents.models import WorkflowTrigger
 from documents.settings import EXPORTER_ARCHIVE_NAME
 from documents.settings import EXPORTER_FILE_NAME
 from documents.settings import EXPORTER_THUMBNAIL_NAME
-from documents.utils import compute_checksum
 from documents.utils import copy_file_with_basic_stats
 from paperless import version
 from paperless.models import ApplicationConfiguration
@@ -82,6 +80,88 @@ def serialize_queryset_batched(
         yield serializers.serialize("python", chunk)


+class StreamingManifestWriter:
+    """Incrementally writes a JSON array to a file, one record at a time.
+
+    Writes to <target>.tmp first; on close(), optionally BLAKE2b-compares
+    with the existing file (--compare-json) and renames or discards accordingly.
+    On exception, discard() deletes the tmp file and leaves the original intact.
+    """
+
+    def __init__(
+        self,
+        path: Path,
+        *,
+        compare_json: bool = False,
+        files_in_export_dir: "set[Path] | None" = None,
+    ) -> None:
+        self._path = path.resolve()
+        self._tmp_path = self._path.with_suffix(self._path.suffix + ".tmp")
+        self._compare_json = compare_json
+        self._files_in_export_dir: set[Path] = (
+            files_in_export_dir if files_in_export_dir is not None else set()
+        )
+        self._file = None
+        self._first = True
+
+    def open(self) -> None:
+        self._path.parent.mkdir(parents=True, exist_ok=True)
+        self._file = self._tmp_path.open("w", encoding="utf-8")
+        self._file.write("[")
+        self._first = True
+
+    def write_record(self, record: dict) -> None:
+        if not self._first:
+            self._file.write(",\n")
+        else:
+            self._first = False
+        self._file.write(
+            json.dumps(record, cls=DjangoJSONEncoder, indent=2, ensure_ascii=False),
+        )
+
+    def write_batch(self, records: list[dict]) -> None:
+        for record in records:
+            self.write_record(record)
+
+    def close(self) -> None:
+        if self._file is None:
+            return
+        self._file.write("\n]")
+        self._file.close()
+        self._file = None
+        self._finalize()
+
+    def discard(self) -> None:
+        if self._file is not None:
+            self._file.close()
+            self._file = None
+        if self._tmp_path.exists():
+            self._tmp_path.unlink()
+
+    def _finalize(self) -> None:
+        """Compare with existing file (if --compare-json) then rename or discard tmp."""
+        if self._path in self._files_in_export_dir:
+            self._files_in_export_dir.remove(self._path)
+            if self._compare_json:
+                existing_hash = hashlib.blake2b(self._path.read_bytes()).hexdigest()
+                new_hash = hashlib.blake2b(self._tmp_path.read_bytes()).hexdigest()
+                if existing_hash == new_hash:
+                    self._tmp_path.unlink()
+                    return
+        self._tmp_path.rename(self._path)
+
+    def __enter__(self) -> "StreamingManifestWriter":
+        self.open()
+        return self
+
+    def __exit__(self, exc_type, exc_val, exc_tb) -> None:
+        if exc_type is not None:
+            self.discard()
+        else:
+            self.close()
+        return False
+
+
 class Command(CryptMixin, BaseCommand):
     help = (
         "Decrypt and rename all files in our collection into a given target "
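The contract of `StreamingManifestWriter` is easiest to see in a toy run. A minimal usage sketch based only on the class as defined above, assuming a Django context where the class and its `DjangoJSONEncoder` dependency import cleanly (the records are made up):

```python
from pathlib import Path

target = Path("/tmp/export/manifest.json")

with StreamingManifestWriter(target, compare_json=False) as writer:
    # Records stream straight to manifest.json.tmp; nothing accumulates in memory.
    writer.write_record({"model": "documents.tag", "pk": 1, "fields": {"name": "tax"}})
    writer.write_batch(
        [
            {"model": "documents.tag", "pk": 2, "fields": {"name": "insurance"}},
            {"model": "documents.tag", "pk": 3, "fields": {"name": "receipts"}},
        ]
    )
# On a clean exit, close() terminates the JSON array and renames the .tmp file
# into place. Had an exception escaped the with-block, __exit__ would call
# discard() instead, deleting the .tmp file and leaving any existing
# manifest.json untouched.
print(target.read_text())  # a valid JSON array containing three records
```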
@@ -323,95 +403,83 @@ class Command(CryptMixin, BaseCommand):
         if settings.AUDIT_LOG_ENABLED:
             manifest_key_to_object_query["log_entries"] = LogEntry.objects.all()

-        with transaction.atomic():
-            manifest_dict = {}
-
-            # Build an overall manifest
-            for key, object_query in manifest_key_to_object_query.items():
-                manifest_dict[key] = list(
-                    chain.from_iterable(
-                        serialize_queryset_batched(
-                            object_query,
-                            batch_size=self.batch_size,
-                        ),
-                    ),
-                )
-
-            self.encrypt_secret_fields(manifest_dict)
-
-            # These are treated specially and included in the per-document manifest
-            # if that setting is enabled. Otherwise, they are just exported to the bulk
-            # manifest
-            document_map: dict[int, Document] = {
-                d.pk: d for d in manifest_key_to_object_query["documents"]
-            }
-            document_manifest = manifest_dict["documents"]
-
-            # 3. Export files from each document
-            for index, document_dict in tqdm.tqdm(
-                enumerate(document_manifest),
-                total=len(document_manifest),
-                disable=self.no_progress_bar,
-            ):
-                document = document_map[document_dict["pk"]]
-
-                # 3.1. generate a unique filename
-                base_name = self.generate_base_name(document)
-
-                # 3.2. write filenames into manifest
-                original_target, thumbnail_target, archive_target = (
-                    self.generate_document_targets(document, base_name, document_dict)
-                )
-
-                # 3.3. write files to target folder
-                if not self.data_only:
-                    self.copy_document_files(
-                        document,
-                        original_target,
-                        thumbnail_target,
-                        archive_target,
-                    )
-
-                if self.split_manifest:
-                    manifest_name = base_name.with_name(f"{base_name.stem}-manifest.json")
-                    if self.use_folder_prefix:
-                        manifest_name = Path("json") / manifest_name
-                    manifest_name = (self.target / manifest_name).resolve()
-                    manifest_name.parent.mkdir(parents=True, exist_ok=True)
-                    content = [document_manifest[index]]
-                    content += list(
-                        filter(
-                            lambda d: d["fields"]["document"] == document_dict["pk"],
-                            manifest_dict["notes"],
-                        ),
-                    )
-                    content += list(
-                        filter(
-                            lambda d: d["fields"]["document"] == document_dict["pk"],
-                            manifest_dict["custom_field_instances"],
-                        ),
-                    )
-
-                    self.check_and_write_json(
-                        content,
-                        manifest_name,
-                    )
-
-        # These were exported already
-        if self.split_manifest:
-            del manifest_dict["documents"]
-            del manifest_dict["notes"]
-            del manifest_dict["custom_field_instances"]
-
-        # 4.1 write primary manifest to target folder
-        manifest = []
-        for key, item in manifest_dict.items():
-            manifest.extend(item)
+        # Crypto setup before streaming begins
+        if self.passphrase:
+            self.setup_crypto(passphrase=self.passphrase)
+        elif MailAccount.objects.count() > 0 or SocialToken.objects.count() > 0:
+            self.stdout.write(
+                self.style.NOTICE(
+                    "No passphrase was given, sensitive fields will be in plaintext",
+                ),
+            )
+
+        document_manifest: list[dict] = []
         manifest_path = (self.target / "manifest.json").resolve()
-        self.check_and_write_json(
-            manifest,
-        )
+
+        with StreamingManifestWriter(
+            manifest_path,
+            compare_json=self.compare_json,
+            files_in_export_dir=self.files_in_export_dir,
+        ) as writer:
+            with transaction.atomic():
+                for key, qs in manifest_key_to_object_query.items():
+                    if key == "documents":
+                        # Accumulate for file-copy loop; written to manifest after
+                        for batch in serialize_queryset_batched(
+                            qs,
+                            batch_size=self.batch_size,
+                        ):
+                            for record in batch:
+                                self._encrypt_record_inline(record)
+                            document_manifest.extend(batch)
+                    elif self.split_manifest and key in (
+                        "notes",
+                        "custom_field_instances",
+                    ):
+                        # Written per-document in _write_split_manifest
+                        pass
+                    else:
+                        for batch in serialize_queryset_batched(
+                            qs,
+                            batch_size=self.batch_size,
+                        ):
+                            for record in batch:
+                                self._encrypt_record_inline(record)
+                            writer.write_batch(batch)
+
+                document_map: dict[int, Document] = {
+                    d.pk: d for d in Document.objects.order_by("id")
+                }
+
+                # 3. Export files from each document
+                for document_dict in tqdm.tqdm(
+                    document_manifest,
+                    total=len(document_manifest),
+                    disable=self.no_progress_bar,
+                ):
+                    document = document_map[document_dict["pk"]]
+
+                    # 3.1. generate a unique filename
+                    base_name = self.generate_base_name(document)
+
+                    # 3.2. write filenames into manifest
+                    original_target, thumbnail_target, archive_target = (
+                        self.generate_document_targets(document, base_name, document_dict)
+                    )
+
+                    # 3.3. write files to target folder
+                    if not self.data_only:
+                        self.copy_document_files(
+                            document,
+                            original_target,
+                            thumbnail_target,
+                            archive_target,
+                        )
+
+                    if self.split_manifest:
+                        self._write_split_manifest(document_dict, document, base_name)
+                    else:
+                        writer.write_record(document_dict)

         # 4.2 write version information to target folder
         extra_metadata_path = (self.target / "metadata.json").resolve()
@@ -533,6 +601,42 @@ class Command(CryptMixin, BaseCommand):
             archive_target,
         )

+    def _encrypt_record_inline(self, record: dict) -> None:
+        """Encrypt sensitive fields in a single record, if passphrase is set."""
+        if not self.passphrase:
+            return
+        fields = self.CRYPT_FIELDS_BY_MODEL.get(record.get("model", ""))
+        if fields:
+            for field in fields:
+                if record["fields"].get(field):
+                    record["fields"][field] = self.encrypt_string(
+                        value=record["fields"][field],
+                    )
+
+    def _write_split_manifest(
+        self,
+        document_dict: dict,
+        document: Document,
+        base_name: Path,
+    ) -> None:
+        """Write per-document manifest file for --split-manifest mode."""
+        content = [document_dict]
+        content.extend(
+            serializers.serialize("python", Note.objects.filter(document=document)),
+        )
+        content.extend(
+            serializers.serialize(
+                "python",
+                CustomFieldInstance.objects.filter(document=document),
+            ),
+        )
+        manifest_name = base_name.with_name(f"{base_name.stem}-manifest.json")
+        if self.use_folder_prefix:
+            manifest_name = Path("json") / manifest_name
+        manifest_name = (self.target / manifest_name).resolve()
+        manifest_name.parent.mkdir(parents=True, exist_ok=True)
+        self.check_and_write_json(content, manifest_name)
+
     def check_and_write_json(
         self,
         content: list[dict] | dict,
@@ -550,14 +654,14 @@ class Command(CryptMixin, BaseCommand):
         if target in self.files_in_export_dir:
             self.files_in_export_dir.remove(target)
             if self.compare_json:
-                target_checksum = compute_checksum(target)
+                target_checksum = hashlib.blake2b(target.read_bytes()).hexdigest()
                 src_str = json.dumps(
                     content,
                     cls=DjangoJSONEncoder,
                     indent=2,
                     ensure_ascii=False,
                 )
-                src_checksum = hashlib.sha256(src_str.encode("utf-8")).hexdigest()
+                src_checksum = hashlib.blake2b(src_str.encode("utf-8")).hexdigest()
                 if src_checksum == target_checksum:
                     perform_write = False
@@ -593,7 +697,7 @@ class Command(CryptMixin, BaseCommand):
             source_stat = source.stat()
             target_stat = target.stat()
             if self.compare_checksums and source_checksum:
-                target_checksum = compute_checksum(target)
+                target_checksum = hashlib.md5(target.read_bytes()).hexdigest()
                 perform_copy = target_checksum != source_checksum
             elif (
                 source_stat.st_mtime != target_stat.st_mtime
@@ -607,28 +711,3 @@ class Command(CryptMixin, BaseCommand):
         if perform_copy:
             target.parent.mkdir(parents=True, exist_ok=True)
             copy_file_with_basic_stats(source, target)
-
-    def encrypt_secret_fields(self, manifest: dict) -> None:
-        """
-        Encrypts certain fields in the export. Currently limited to the mail account password
-        """
-
-        if self.passphrase:
-            self.setup_crypto(passphrase=self.passphrase)
-
-            for crypt_config in self.CRYPT_FIELDS:
-                exporter_key = crypt_config["exporter_key"]
-                crypt_fields = crypt_config["fields"]
-                for manifest_record in manifest[exporter_key]:
-                    for field in crypt_fields:
-                        if manifest_record["fields"][field]:
-                            manifest_record["fields"][field] = self.encrypt_string(
-                                value=manifest_record["fields"][field],
-                            )
-
-        elif MailAccount.objects.count() > 0 or SocialToken.objects.count() > 0:
-            self.stdout.write(
-                self.style.NOTICE(
-                    "No passphrase was given, sensitive fields will be in plaintext",
-                ),
-            )
@@ -71,7 +71,7 @@ class CryptMixin:
     key_size = 32
     kdf_algorithm = "pbkdf2_sha256"

-    CRYPT_FIELDS: CryptFields = [
+    CRYPT_FIELDS: list[CryptFields] = [
         {
             "exporter_key": "mail_accounts",
             "model_name": "paperless_mail.mailaccount",
@@ -89,6 +89,10 @@ class CryptMixin:
             ],
         },
     ]
+    # O(1) lookup for per-record encryption; derived from CRYPT_FIELDS at class definition time
+    CRYPT_FIELDS_BY_MODEL: dict[str, list[str]] = {
+        cfg["model_name"]: cfg["fields"] for cfg in CRYPT_FIELDS
+    }

     def get_crypt_params(self) -> dict[str, dict[str, str | int]]:
         return {
@@ -1,130 +0,0 @@
import hashlib
import logging
from pathlib import Path

from django.conf import settings
from django.db import migrations
from django.db import models

logger = logging.getLogger("paperless.migrations")

_CHUNK_SIZE = 65536  # 64 KiB — avoids loading entire files into memory
_BATCH_SIZE = 500  # documents per bulk_update call
_PROGRESS_INTERVAL = 500  # log a progress line every N documents


def _sha256(path: Path) -> str:
    h = hashlib.sha256()
    with path.open("rb") as fh:
        while chunk := fh.read(_CHUNK_SIZE):
            h.update(chunk)
    return h.hexdigest()


def recompute_checksums(apps, schema_editor):
    """Recompute all document checksums from MD5 to SHA256."""
    Document = apps.get_model("documents", "Document")

    total = Document.objects.count()
    if total == 0:
        return

    logger.info("Recomputing SHA-256 checksums for %d document(s)...", total)

    batch: list = []
    processed = 0

    for doc in Document.objects.only(
        "pk",
        "filename",
        "checksum",
        "archive_filename",
        "archive_checksum",
    ).iterator(chunk_size=_BATCH_SIZE):
        updated_fields: list[str] = []

        # Reconstruct source path the same way Document.source_path does
        fname = str(doc.filename) if doc.filename else f"{doc.pk:07}.pdf"
        source_path = (settings.ORIGINALS_DIR / Path(fname)).resolve()

        if source_path.exists():
            doc.checksum = _sha256(source_path)
            updated_fields.append("checksum")
        else:
            logger.warning(
                "Document %s: original file %s not found, checksum not updated.",
                doc.pk,
                source_path,
            )

        # Mirror Document.has_archive_version: archive_filename is not None
        if doc.archive_filename is not None:
            archive_path = (
                settings.ARCHIVE_DIR / Path(str(doc.archive_filename))
            ).resolve()
            if archive_path.exists():
                doc.archive_checksum = _sha256(archive_path)
                updated_fields.append("archive_checksum")
            else:
                logger.warning(
                    "Document %s: archive file %s not found, checksum not updated.",
                    doc.pk,
                    archive_path,
                )

        if updated_fields:
            batch.append(doc)

        processed += 1

        if len(batch) >= _BATCH_SIZE:
            Document.objects.bulk_update(batch, ["checksum", "archive_checksum"])
            batch.clear()

        if processed % _PROGRESS_INTERVAL == 0:
            logger.info(
                "SHA-256 checksum progress: %d/%d (%d%%)",
                processed,
                total,
                processed * 100 // total,
            )

    if batch:
        Document.objects.bulk_update(batch, ["checksum", "archive_checksum"])

    logger.info(
        "SHA-256 checksum recomputation complete: %d document(s) processed.",
        total,
    )


class Migration(migrations.Migration):
    dependencies = [
        ("documents", "0016_document_version_index_and_more"),
    ]

    operations = [
        migrations.AlterField(
            model_name="document",
            name="checksum",
            field=models.CharField(
                editable=False,
                help_text="The checksum of the original document.",
                max_length=64,
                verbose_name="checksum",
            ),
        ),
        migrations.AlterField(
            model_name="document",
            name="archive_checksum",
            field=models.CharField(
                blank=True,
                editable=False,
                help_text="The checksum of the archived document.",
                max_length=64,
                null=True,
                verbose_name="archive checksum",
            ),
        ),
        migrations.RunPython(recompute_checksums, migrations.RunPython.noop),
    ]
@@ -216,14 +216,14 @@ class Document(SoftDeleteModel, ModelWithOwner):  # type: ignore[django-manager-

    checksum = models.CharField(
        _("checksum"),
        max_length=64,
        max_length=32,
        editable=False,
        help_text=_("The checksum of the original document."),
    )

    archive_checksum = models.CharField(
        _("archive checksum"),
        max_length=64,
        max_length=32,
        editable=False,
        blank=True,
        null=True,

@@ -11,6 +11,7 @@ is an identity function that adds no overhead.

from __future__ import annotations

import hashlib
import logging
import uuid
from collections import defaultdict
@@ -29,7 +30,6 @@ from django.utils import timezone

from documents.models import Document
from documents.models import PaperlessTask
from documents.utils import compute_checksum
from paperless.config import GeneralConfig

logger = logging.getLogger("paperless.sanity_checker")
@@ -218,7 +218,7 @@ def _check_original(

    present_files.discard(source_path)
    try:
        checksum = compute_checksum(source_path)
        checksum = hashlib.md5(source_path.read_bytes()).hexdigest()
    except OSError as e:
        messages.error(doc.pk, f"Cannot read original file of document: {e}")
    else:
@@ -255,7 +255,7 @@ def _check_archive(

    present_files.discard(archive_path)
    try:
        checksum = compute_checksum(archive_path)
        checksum = hashlib.md5(archive_path.read_bytes()).hexdigest()
    except OSError as e:
        messages.error(
            doc.pk,

@@ -1723,6 +1723,15 @@ class BulkEditSerializer(
        except ValueError:
            raise serializers.ValidationError("invalid rotation degrees")

    def _validate_source_mode(self, parameters) -> None:
        source_mode = parameters.get(
            "source_mode",
            bulk_edit.SourceModeChoices.LATEST_VERSION,
        )
        if source_mode not in bulk_edit.SourceModeChoices.__dict__.values():
            raise serializers.ValidationError("Invalid source_mode")
        parameters["source_mode"] = source_mode

    def _validate_parameters_split(self, parameters) -> None:
        if "pages" not in parameters:
            raise serializers.ValidationError("pages not specified")
@@ -1823,6 +1832,9 @@ class BulkEditSerializer(
        method = attrs["method"]
        parameters = attrs["parameters"]

        if "source_mode" in parameters:
            self._validate_source_mode(parameters)

        if method == bulk_edit.set_correspondent:
            self._validate_parameters_correspondent(parameters)
        elif method == bulk_edit.set_document_type:

@@ -1,4 +1,5 @@
import datetime
import hashlib
import logging
import shutil
import uuid
@@ -62,7 +63,6 @@ from documents.signals import document_updated
from documents.signals.handlers import cleanup_document_deletion
from documents.signals.handlers import run_workflows
from documents.signals.handlers import send_websocket_document_updated
from documents.utils import compute_checksum
from documents.workflows.utils import get_workflows_for_trigger
from paperless.config import AIConfig
from paperless_ai.indexing import llm_index_add_or_update_document
@@ -323,7 +323,8 @@ def update_document_content_maybe_archive_file(document_id) -> None:
    with transaction.atomic():
        oldDocument = Document.objects.get(pk=document.pk)
        if parser.get_archive_path():
            checksum = compute_checksum(Path(parser.get_archive_path()))
            with Path(parser.get_archive_path()).open("rb") as f:
                checksum = hashlib.md5(f.read()).hexdigest()
            # I'm going to save first so that in case the file move
            # fails, the database is rolled back.
            # We also don't use save() since that triggers the filehandling

@@ -82,8 +82,8 @@ def sample_doc(

    return DocumentFactory(
        title="test",
        checksum="1093cf6e32adbd16b06969df09215d42c4a3a8938cc18b39455953f08d1ff2ab",
        archive_checksum="706124ecde3c31616992fa979caed17a726b1c9ccdba70e82a4ff796cea97ccf",
        checksum="42995833e01aea9b3edee44bbfdd7ce1",
        archive_checksum="62acb0bcbfbcaa62ca6ad3668e4e404b",
        content="test content",
        pk=1,
        filename="0000001.pdf",

@@ -60,7 +60,7 @@ class DocumentFactory(DjangoModelFactory):
        model = Document

    title = factory.Faker("sentence", nb_words=4)
    checksum = factory.Faker("sha256")
    checksum = factory.Faker("md5")
    content = factory.Faker("paragraph")
    correspondent = None
    document_type = None

@@ -1395,7 +1395,10 @@ class TestBulkEditAPI(DirectoriesMixin, APITestCase):
                {
                    "documents": [self.doc2.id],
                    "method": "edit_pdf",
                    "parameters": {"operations": [{"page": 1}]},
                    "parameters": {
                        "operations": [{"page": 1}],
                        "source_mode": "explicit_selection",
                    },
                },
            ),
            content_type="application/json",
@@ -1407,6 +1410,7 @@ class TestBulkEditAPI(DirectoriesMixin, APITestCase):
        args, kwargs = m.call_args
        self.assertCountEqual(args[0], [self.doc2.id])
        self.assertEqual(kwargs["operations"], [{"page": 1}])
        self.assertEqual(kwargs["source_mode"], "explicit_selection")
        self.assertEqual(kwargs["user"], self.user)

    def test_edit_pdf_invalid_params(self) -> None:
@@ -1572,6 +1576,24 @@ class TestBulkEditAPI(DirectoriesMixin, APITestCase):
            response.content,
        )

        # invalid source mode
        response = self.client.post(
            "/api/documents/bulk_edit/",
            json.dumps(
                {
                    "documents": [self.doc2.id],
                    "method": "edit_pdf",
                    "parameters": {
                        "operations": [{"page": 1}],
                        "source_mode": "not_a_mode",
                    },
                },
            ),
            content_type="application/json",
        )
        self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST)
        self.assertIn(b"Invalid source_mode", response.content)

    @mock.patch("documents.serialisers.bulk_edit.edit_pdf")
    def test_edit_pdf_page_out_of_bounds(self, m) -> None:
        """

@@ -405,7 +405,9 @@ class TestBulkEdit(DirectoriesMixin, TestCase):
        self.assertTrue(Document.objects.filter(id=self.doc1.id).exists())
        self.assertFalse(Document.objects.filter(id=version.id).exists())

    def test_get_root_and_current_doc_mapping(self) -> None:
    def test_resolve_root_and_source_doc_latest_version_prefers_newest_version(
        self,
    ) -> None:
        version1 = Document.objects.create(
            checksum="B-v1",
            title="B version 1",
@@ -417,18 +419,14 @@ class TestBulkEdit(DirectoriesMixin, TestCase):
            root_document=self.doc2,
        )

        root_ids_by_doc_id = bulk_edit._get_root_ids_by_doc_id(
            [self.doc2.id, version1.id, version2.id],
        root_doc, source_doc = bulk_edit._resolve_root_and_source_doc(
            self.doc2,
            source_mode="latest_version",
        )
        self.assertEqual(root_ids_by_doc_id[self.doc2.id], self.doc2.id)
        self.assertEqual(root_ids_by_doc_id[version1.id], self.doc2.id)
        self.assertEqual(root_ids_by_doc_id[version2.id], self.doc2.id)

        root_docs, current_docs = bulk_edit._get_root_and_current_docs_by_root_id(
            {self.doc2.id},
        )
        self.assertEqual(root_docs[self.doc2.id].id, self.doc2.id)
        self.assertEqual(current_docs[self.doc2.id].id, version2.id)
        self.assertEqual(root_doc.id, self.doc2.id)
        self.assertEqual(source_doc.id, version2.id)
        self.assertNotEqual(source_doc.id, version1.id)

    @mock.patch("documents.tasks.bulk_update_documents.delay")
    def test_set_permissions(self, m) -> None:
@@ -662,6 +660,33 @@ class TestPDFActions(DirectoriesMixin, TestCase):

        self.assertEqual(result, "OK")

    @mock.patch("pikepdf.open")
    @mock.patch("documents.tasks.consume_file.s")
    def test_merge_uses_latest_version_source_for_root_selection(
        self,
        mock_consume_file,
        mock_open_pdf,
    ) -> None:
        version_file = self.dirs.scratch_dir / "sample2_version_merge.pdf"
        shutil.copy(self.doc2.source_path, version_file)
        version = Document.objects.create(
            checksum="B-v1",
            title="B version 1",
            root_document=self.doc2,
            filename=version_file,
            mime_type="application/pdf",
        )
        fake_pdf = mock.MagicMock()
        fake_pdf.pdf_version = "1.7"
        fake_pdf.pages = [mock.Mock()]
        mock_open_pdf.return_value.__enter__.return_value = fake_pdf

        result = bulk_edit.merge([self.doc2.id])

        self.assertEqual(result, "OK")
        mock_open_pdf.assert_called_once_with(str(version.source_path))
        mock_consume_file.assert_not_called()

    @mock.patch("documents.bulk_edit.delete.si")
    @mock.patch("documents.tasks.consume_file.s")
    def test_merge_and_delete_originals(
@@ -870,6 +895,36 @@ class TestPDFActions(DirectoriesMixin, TestCase):

        self.assertEqual(result, "OK")

    @mock.patch("documents.bulk_edit.group")
    @mock.patch("pikepdf.open")
    @mock.patch("documents.tasks.consume_file.s")
    def test_split_uses_latest_version_source_for_root_selection(
        self,
        mock_consume_file,
        mock_open_pdf,
        mock_group,
    ) -> None:
        version_file = self.dirs.scratch_dir / "sample2_version_split.pdf"
        shutil.copy(self.doc2.source_path, version_file)
        version = Document.objects.create(
            checksum="B-v1",
            title="B version 1",
            root_document=self.doc2,
            filename=version_file,
            mime_type="application/pdf",
        )
        fake_pdf = mock.MagicMock()
        fake_pdf.pages = [mock.Mock(), mock.Mock()]
        mock_open_pdf.return_value.__enter__.return_value = fake_pdf
        mock_group.return_value.delay.return_value = None

        result = bulk_edit.split([self.doc2.id], [[1], [2]])

        self.assertEqual(result, "OK")
        mock_open_pdf.assert_called_once_with(version.source_path)
        mock_consume_file.assert_not_called()
        mock_group.return_value.delay.assert_not_called()

    @mock.patch("documents.bulk_edit.delete.si")
    @mock.patch("documents.tasks.consume_file.s")
    @mock.patch("documents.bulk_edit.chord")
@@ -1041,6 +1096,34 @@ class TestPDFActions(DirectoriesMixin, TestCase):
        self.assertIsNotNone(overrides)
        self.assertEqual(result, "OK")

    @mock.patch("documents.data_models.magic.from_file", return_value="application/pdf")
    @mock.patch("documents.tasks.consume_file.delay")
    @mock.patch("pikepdf.open")
    def test_rotate_explicit_selection_uses_root_source_when_root_selected(
        self,
        mock_open,
        mock_consume_delay,
        mock_magic,
    ):
        Document.objects.create(
            checksum="B-v1",
            title="B version 1",
            root_document=self.doc2,
        )
        fake_pdf = mock.MagicMock()
        fake_pdf.pages = [mock.Mock()]
        mock_open.return_value.__enter__.return_value = fake_pdf

        result = bulk_edit.rotate(
            [self.doc2.id],
            90,
            source_mode="explicit_selection",
        )

        self.assertEqual(result, "OK")
        mock_open.assert_called_once_with(self.doc2.source_path)
        mock_consume_delay.assert_called_once()

    @mock.patch("documents.tasks.consume_file.delay")
    @mock.patch("pikepdf.Pdf.save")
    @mock.patch("documents.data_models.magic.from_file", return_value="application/pdf")
@@ -1065,6 +1148,34 @@ class TestPDFActions(DirectoriesMixin, TestCase):
        self.assertIsNotNone(overrides)
        self.assertEqual(result, "OK")

    @mock.patch("documents.data_models.magic.from_file", return_value="application/pdf")
    @mock.patch("documents.tasks.consume_file.delay")
    @mock.patch("pikepdf.open")
    def test_delete_pages_explicit_selection_uses_root_source_when_root_selected(
        self,
        mock_open,
        mock_consume_delay,
        mock_magic,
    ):
        Document.objects.create(
            checksum="B-v1",
            title="B version 1",
            root_document=self.doc2,
        )
        fake_pdf = mock.MagicMock()
        fake_pdf.pages = [mock.Mock(), mock.Mock()]
        mock_open.return_value.__enter__.return_value = fake_pdf

        result = bulk_edit.delete_pages(
            [self.doc2.id],
            [1],
            source_mode="explicit_selection",
        )

        self.assertEqual(result, "OK")
        mock_open.assert_called_once_with(self.doc2.source_path)
        mock_consume_delay.assert_called_once()

    @mock.patch("documents.tasks.consume_file.delay")
    @mock.patch("pikepdf.Pdf.save")
    def test_delete_pages_with_error(self, mock_pdf_save, mock_consume_delay):
@@ -1213,6 +1324,40 @@ class TestPDFActions(DirectoriesMixin, TestCase):
        self.assertTrue(str(consumable.original_file).endswith("_edited.pdf"))
        self.assertIsNotNone(overrides)

    @mock.patch("documents.data_models.magic.from_file", return_value="application/pdf")
    @mock.patch("documents.tasks.consume_file.delay")
    @mock.patch("pikepdf.new")
    @mock.patch("pikepdf.open")
    def test_edit_pdf_explicit_selection_uses_root_source_when_root_selected(
        self,
        mock_open,
        mock_new,
        mock_consume_delay,
        mock_magic,
    ):
        Document.objects.create(
            checksum="B-v1",
            title="B version 1",
            root_document=self.doc2,
        )
        fake_pdf = mock.MagicMock()
        fake_pdf.pages = [mock.Mock()]
        mock_open.return_value.__enter__.return_value = fake_pdf
        output_pdf = mock.MagicMock()
        output_pdf.pages = []
        mock_new.return_value = output_pdf

        result = bulk_edit.edit_pdf(
            [self.doc2.id],
            operations=[{"page": 1}],
            update_document=True,
            source_mode="explicit_selection",
        )

        self.assertEqual(result, "OK")
        mock_open.assert_called_once_with(self.doc2.source_path)
        mock_consume_delay.assert_called_once()

    @mock.patch("documents.bulk_edit.group")
    @mock.patch("documents.tasks.consume_file.s")
    def test_edit_pdf_without_metadata(
@@ -1333,6 +1478,34 @@ class TestPDFActions(DirectoriesMixin, TestCase):
        self.assertEqual(consumable.root_document_id, doc.id)
        self.assertIsNotNone(overrides)

    @mock.patch("documents.data_models.magic.from_file", return_value="application/pdf")
    @mock.patch("documents.tasks.consume_file.delay")
    @mock.patch("pikepdf.open")
    def test_remove_password_explicit_selection_uses_root_source_when_root_selected(
        self,
        mock_open,
        mock_consume_delay,
        mock_magic,
    ) -> None:
        Document.objects.create(
            checksum="A-v1",
            title="A version 1",
            root_document=self.doc1,
        )
        fake_pdf = mock.MagicMock()
        mock_open.return_value.__enter__.return_value = fake_pdf

        result = bulk_edit.remove_password(
            [self.doc1.id],
            password="secret",
            update_document=True,
            source_mode="explicit_selection",
        )

        self.assertEqual(result, "OK")
        mock_open.assert_called_once_with(self.doc1.source_path, password="secret")
        mock_consume_delay.assert_called_once()

    @mock.patch("documents.bulk_edit.chord")
    @mock.patch("documents.bulk_edit.group")
    @mock.patch("documents.tasks.consume_file.s")

@@ -245,14 +245,8 @@ class TestConsumer(

        self.assertIsFile(document.archive_path)

        self.assertEqual(
            document.checksum,
            "1093cf6e32adbd16b06969df09215d42c4a3a8938cc18b39455953f08d1ff2ab",
        )
        self.assertEqual(
            document.archive_checksum,
            "706124ecde3c31616992fa979caed17a726b1c9ccdba70e82a4ff796cea97ccf",
        )
        self.assertEqual(document.checksum, "42995833e01aea9b3edee44bbfdd7ce1")
        self.assertEqual(document.archive_checksum, "62acb0bcbfbcaa62ca6ad3668e4e404b")

        self.assertIsNotFile(filename)


@@ -63,8 +63,8 @@ class TestExportImport(

        self.d1 = Document.objects.create(
            content="Content",
            checksum="1093cf6e32adbd16b06969df09215d42c4a3a8938cc18b39455953f08d1ff2ab",
            archive_checksum="706124ecde3c31616992fa979caed17a726b1c9ccdba70e82a4ff796cea97ccf",
            checksum="42995833e01aea9b3edee44bbfdd7ce1",
            archive_checksum="62acb0bcbfbcaa62ca6ad3668e4e404b",
            title="wow1",
            filename="0000001.pdf",
            mime_type="application/pdf",
@@ -72,21 +72,21 @@ class TestExportImport(
        )
        self.d2 = Document.objects.create(
            content="Content",
            checksum="550d1bae0f746d4f7c6be07054eb20cc2f11988a58ef64ceae45e98f85e92a5b",
            checksum="9c9691e51741c1f4f41a20896af31770",
            title="wow2",
            filename="0000002.pdf",
            mime_type="application/pdf",
        )
        self.d3 = Document.objects.create(
            content="Content",
            checksum="f1ba6b7ff8548214a75adec228f5468a14fe187f445bc0b9485cbf1c35b15915",
            checksum="d38d7ed02e988e072caf924e0f3fcb76",
            title="wow2",
            filename="0000003.pdf",
            mime_type="application/pdf",
        )
        self.d4 = Document.objects.create(
            content="Content",
            checksum="a81b16b6b313cfd7e60eb7b12598d1343b58622b4030cfa19a2724a02e98db1b",
            checksum="82186aaa94f0b98697d704b90fd1c072",
            title="wow_dec",
            filename="0000004.pdf",
            mime_type="application/pdf",
@@ -240,7 +240,7 @@ class TestExportImport(
        )

        with Path(fname).open("rb") as f:
            checksum = hashlib.sha256(f.read()).hexdigest()
            checksum = hashlib.md5(f.read()).hexdigest()
        self.assertEqual(checksum, element["fields"]["checksum"])

        # Generated field "content_length" should not be exported,
@@ -254,7 +254,7 @@ class TestExportImport(
        self.assertIsFile(fname)

        with Path(fname).open("rb") as f:
            checksum = hashlib.sha256(f.read()).hexdigest()
            checksum = hashlib.md5(f.read()).hexdigest()
        self.assertEqual(checksum, element["fields"]["archive_checksum"])

    elif element["model"] == "documents.note":

@@ -260,8 +260,8 @@ class TestCommandImport(

        Document.objects.create(
            content="Content",
            checksum="1093cf6e32adbd16b06969df09215d42c4a3a8938cc18b39455953f08d1ff2ab",
            archive_checksum="706124ecde3c31616992fa979caed17a726b1c9ccdba70e82a4ff796cea97ccf",
            checksum="42995833e01aea9b3edee44bbfdd7ce1",
            archive_checksum="62acb0bcbfbcaa62ca6ad3668e4e404b",
            title="wow1",
            filename="0000001.pdf",
            mime_type="application/pdf",

@@ -1,4 +1,3 @@
import hashlib
import logging
import shutil
from os import utime
@@ -129,15 +128,3 @@ def get_boolean(boolstr: str) -> bool:
    Return a boolean value from a string representation.
    """
    return bool(boolstr.lower() in ("yes", "y", "1", "t", "true"))


def compute_checksum(path: Path, chunk_size: int = 65536) -> str:
    """
    Return the SHA256 hex digest of the file at *path*, reading in chunks
    of *chunk_size* bytes to avoid loading the entire file into memory.
    """
    h = hashlib.sha256()
    with path.open("rb") as f:
        while chunk := f.read(chunk_size):
            h.update(chunk)
    return h.hexdigest()

@@ -2,7 +2,7 @@ msgid ""
msgstr ""
"Project-Id-Version: paperless-ngx\n"
"Report-Msgid-Bugs-To: \n"
"POT-Creation-Date: 2026-03-04 23:29+0000\n"
"POT-Creation-Date: 2026-03-06 20:00+0000\n"
"PO-Revision-Date: 2022-02-17 04:17\n"
"Last-Translator: \n"
"Language-Team: English\n"
@@ -1299,7 +1299,7 @@ msgstr ""
msgid "workflow runs"
msgstr ""

#: documents/serialisers.py:463 documents/serialisers.py:2332
#: documents/serialisers.py:463 documents/serialisers.py:2344
msgid "Insufficient permissions."
msgstr ""

@@ -1307,39 +1307,39 @@ msgstr ""
msgid "Invalid color."
msgstr ""

#: documents/serialisers.py:1955
#: documents/serialisers.py:1967
#, python-format
msgid "File type %(type)s not supported"
msgstr ""

#: documents/serialisers.py:1999
#: documents/serialisers.py:2011
#, python-format
msgid "Custom field id must be an integer: %(id)s"
msgstr ""

#: documents/serialisers.py:2006
#: documents/serialisers.py:2018
#, python-format
msgid "Custom field with id %(id)s does not exist"
msgstr ""

#: documents/serialisers.py:2023 documents/serialisers.py:2033
#: documents/serialisers.py:2035 documents/serialisers.py:2045
msgid ""
"Custom fields must be a list of integers or an object mapping ids to values."
msgstr ""

#: documents/serialisers.py:2028
#: documents/serialisers.py:2040
msgid "Some custom fields don't exist or were specified twice."
msgstr ""

#: documents/serialisers.py:2175
#: documents/serialisers.py:2187
msgid "Invalid variable detected."
msgstr ""

#: documents/serialisers.py:2388
#: documents/serialisers.py:2400
msgid "Duplicate document identifiers are not allowed."
msgstr ""

#: documents/serialisers.py:2418 documents/views.py:3328
#: documents/serialisers.py:2430 documents/views.py:3328
#, python-format
msgid "Documents not found: %(ids)s"
msgstr ""

@@ -1,107 +1,100 @@
from unittest import mock
import logging

import pytest
from allauth.account.adapter import get_adapter
from allauth.core import context
from allauth.socialaccount.adapter import get_adapter as get_social_adapter
from django.conf import settings
from django.contrib.auth.models import AnonymousUser
from django.contrib.auth.models import Group
from django.contrib.auth.models import User
from django.forms import ValidationError
from django.http import HttpRequest
from django.test import TestCase
from django.test import override_settings
from django.urls import reverse
from pytest_django.fixtures import SettingsWrapper
from pytest_mock import MockerFixture
from rest_framework.authtoken.models import Token

from paperless.adapter import DrfTokenStrategy


class TestCustomAccountAdapter(TestCase):
    def test_is_open_for_signup(self) -> None:
@pytest.mark.django_db
class TestCustomAccountAdapter:
    def test_is_open_for_signup(self, settings: SettingsWrapper) -> None:
        adapter = get_adapter()

        # With no accounts, signups should be allowed
        self.assertTrue(adapter.is_open_for_signup(None))
        assert adapter.is_open_for_signup(None)

        User.objects.create_user("testuser")

        # Test when ACCOUNT_ALLOW_SIGNUPS is True
        settings.ACCOUNT_ALLOW_SIGNUPS = True
        self.assertTrue(adapter.is_open_for_signup(None))
        assert adapter.is_open_for_signup(None)

        # Test when ACCOUNT_ALLOW_SIGNUPS is False
        settings.ACCOUNT_ALLOW_SIGNUPS = False
        self.assertFalse(adapter.is_open_for_signup(None))
        assert not adapter.is_open_for_signup(None)

    def test_is_safe_url(self) -> None:
    def test_is_safe_url(self, settings: SettingsWrapper) -> None:
        request = HttpRequest()
        request.get_host = mock.Mock(return_value="example.com")
        request.get_host = lambda: "example.com"
        with context.request_context(request):
            adapter = get_adapter()
            with override_settings(ALLOWED_HOSTS=["*"]):
                # True because request host is same
                url = "https://example.com"
                self.assertTrue(adapter.is_safe_url(url))

                url = "https://evil.com"
            settings.ALLOWED_HOSTS = ["*"]
            # True because request host is same
            assert adapter.is_safe_url("https://example.com")
            # False despite wildcard because request host is different
                self.assertFalse(adapter.is_safe_url(url))
            assert not adapter.is_safe_url("https://evil.com")

            settings.ALLOWED_HOSTS = ["example.com"]
                url = "https://example.com"
            # True because request host is same
                self.assertTrue(adapter.is_safe_url(url))
            assert adapter.is_safe_url("https://example.com")

            settings.ALLOWED_HOSTS = ["*", "example.com"]
                url = "//evil.com"
            # False because request host is not in allowed hosts
                self.assertFalse(adapter.is_safe_url(url))
            assert not adapter.is_safe_url("//evil.com")

    @mock.patch("allauth.core.internal.ratelimit.consume", return_value=True)
    def test_pre_authenticate(self, mock_consume) -> None:
    def test_pre_authenticate(
        self,
        settings: SettingsWrapper,
        mocker: MockerFixture,
    ) -> None:
        mocker.patch("allauth.core.internal.ratelimit.consume", return_value=True)
        adapter = get_adapter()
        request = HttpRequest()
        request.get_host = mock.Mock(return_value="example.com")
        request.get_host = lambda: "example.com"

        settings.DISABLE_REGULAR_LOGIN = False
        adapter.pre_authenticate(request)

        settings.DISABLE_REGULAR_LOGIN = True
        with self.assertRaises(ValidationError):
        with pytest.raises(ValidationError):
            adapter.pre_authenticate(request)

    def test_get_reset_password_from_key_url(self) -> None:
    def test_get_reset_password_from_key_url(self, settings: SettingsWrapper) -> None:
        request = HttpRequest()
        request.get_host = mock.Mock(return_value="foo.org")
        request.get_host = lambda: "foo.org"
        with context.request_context(request):
            adapter = get_adapter()

            # Test when PAPERLESS_URL is None
            with override_settings(
                PAPERLESS_URL=None,
                ACCOUNT_DEFAULT_HTTP_PROTOCOL="https",
            ):
                expected_url = f"https://foo.org{reverse('account_reset_password_from_key', kwargs={'uidb36': 'UID', 'key': 'KEY'})}"
                self.assertEqual(
                    adapter.get_reset_password_from_key_url("UID-KEY"),
                    expected_url,
                )
            settings.PAPERLESS_URL = None
            settings.ACCOUNT_DEFAULT_HTTP_PROTOCOL = "https"
            expected_url = f"https://foo.org{reverse('account_reset_password_from_key', kwargs={'uidb36': 'UID', 'key': 'KEY'})}"
            assert adapter.get_reset_password_from_key_url("UID-KEY") == expected_url

            # Test when PAPERLESS_URL is not None
            with override_settings(PAPERLESS_URL="https://bar.com"):
                expected_url = f"https://bar.com{reverse('account_reset_password_from_key', kwargs={'uidb36': 'UID', 'key': 'KEY'})}"
                self.assertEqual(
                    adapter.get_reset_password_from_key_url("UID-KEY"),
                    expected_url,
                )
            settings.PAPERLESS_URL = "https://bar.com"
            expected_url = f"https://bar.com{reverse('account_reset_password_from_key', kwargs={'uidb36': 'UID', 'key': 'KEY'})}"
            assert adapter.get_reset_password_from_key_url("UID-KEY") == expected_url

    @override_settings(ACCOUNT_DEFAULT_GROUPS=["group1", "group2"])
    def test_save_user_adds_groups(self) -> None:
    def test_save_user_adds_groups(
        self,
        settings: SettingsWrapper,
        mocker: MockerFixture,
    ) -> None:
        settings.ACCOUNT_DEFAULT_GROUPS = ["group1", "group2"]
        Group.objects.create(name="group1")
        user = User.objects.create_user("testuser")
        adapter = get_adapter()
        form = mock.Mock(
        form = mocker.MagicMock(
            cleaned_data={
                "username": "testuser",
                "email": "user@example.com",
@@ -110,88 +103,81 @@ class TestCustomAccountAdapter(TestCase):

        user = adapter.save_user(HttpRequest(), user, form, commit=True)

        self.assertEqual(user.groups.count(), 1)
        self.assertTrue(user.groups.filter(name="group1").exists())
        self.assertFalse(user.groups.filter(name="group2").exists())
        assert user.groups.count() == 1
        assert user.groups.filter(name="group1").exists()
        assert not user.groups.filter(name="group2").exists()

    def test_fresh_install_save_creates_superuser(self) -> None:
    def test_fresh_install_save_creates_superuser(self, mocker: MockerFixture) -> None:
        adapter = get_adapter()
        form = mock.Mock(
        form = mocker.MagicMock(
            cleaned_data={
                "username": "testuser",
                "email": "user@paperless-ngx.com",
            },
        )
        user = adapter.save_user(HttpRequest(), User(), form, commit=True)
        self.assertTrue(user.is_superuser)
        assert user.is_superuser

        # Next time, it should not create a superuser
        form = mock.Mock(
        form = mocker.MagicMock(
            cleaned_data={
                "username": "testuser2",
                "email": "user2@paperless-ngx.com",
            },
        )
        user2 = adapter.save_user(HttpRequest(), User(), form, commit=True)
        self.assertFalse(user2.is_superuser)
        assert not user2.is_superuser


class TestCustomSocialAccountAdapter(TestCase):
    def test_is_open_for_signup(self) -> None:
class TestCustomSocialAccountAdapter:
    @pytest.mark.django_db
    def test_is_open_for_signup(self, settings: SettingsWrapper) -> None:
        adapter = get_social_adapter()

        # Test when SOCIALACCOUNT_ALLOW_SIGNUPS is True
        settings.SOCIALACCOUNT_ALLOW_SIGNUPS = True
        self.assertTrue(adapter.is_open_for_signup(None, None))
        assert adapter.is_open_for_signup(None, None)

        # Test when SOCIALACCOUNT_ALLOW_SIGNUPS is False
        settings.SOCIALACCOUNT_ALLOW_SIGNUPS = False
        self.assertFalse(adapter.is_open_for_signup(None, None))
        assert not adapter.is_open_for_signup(None, None)

    def test_get_connect_redirect_url(self) -> None:
        adapter = get_social_adapter()
        request = None
        socialaccount = None
        assert adapter.get_connect_redirect_url(None, None) == reverse("base")

        # Test the default URL
        expected_url = reverse("base")
        self.assertEqual(
            adapter.get_connect_redirect_url(request, socialaccount),
            expected_url,
        )

    @override_settings(SOCIAL_ACCOUNT_DEFAULT_GROUPS=["group1", "group2"])
    def test_save_user_adds_groups(self) -> None:
    @pytest.mark.django_db
    def test_save_user_adds_groups(
        self,
        settings: SettingsWrapper,
        mocker: MockerFixture,
    ) -> None:
        settings.SOCIAL_ACCOUNT_DEFAULT_GROUPS = ["group1", "group2"]
        Group.objects.create(name="group1")
        adapter = get_social_adapter()
        request = HttpRequest()
        user = User.objects.create_user("testuser")
        sociallogin = mock.Mock(
            user=user,
        )
        sociallogin = mocker.MagicMock(user=user)

        user = adapter.save_user(request, sociallogin, None)
        user = adapter.save_user(HttpRequest(), sociallogin, None)

        self.assertEqual(user.groups.count(), 1)
        self.assertTrue(user.groups.filter(name="group1").exists())
        self.assertFalse(user.groups.filter(name="group2").exists())
        assert user.groups.count() == 1
        assert user.groups.filter(name="group1").exists()
        assert not user.groups.filter(name="group2").exists()

    def test_error_logged_on_authentication_error(self) -> None:
    def test_error_logged_on_authentication_error(
        self,
        caplog: pytest.LogCaptureFixture,
    ) -> None:
        adapter = get_social_adapter()
        request = HttpRequest()
        with self.assertLogs("paperless.auth", level="INFO") as log_cm:
        with caplog.at_level(logging.INFO, logger="paperless.auth"):
            adapter.on_authentication_error(
                request,
                HttpRequest(),
                provider="test-provider",
                error="Error",
                exception="Test authentication error",
            )
        self.assertTrue(
            any("Test authentication error" in message for message in log_cm.output),
        )
        assert any("Test authentication error" in msg for msg in caplog.messages)


class TestDrfTokenStrategy(TestCase):
@pytest.mark.django_db
class TestDrfTokenStrategy:
    def test_create_access_token_creates_new_token(self) -> None:
        """
        GIVEN:
@@ -201,7 +187,6 @@ class TestDrfTokenStrategy(TestCase):
        THEN:
            - A new token is created and its key is returned
        """

        user = User.objects.create_user("testuser")
        request = HttpRequest()
        request.user = user
@@ -209,13 +194,9 @@ class TestDrfTokenStrategy(TestCase):
        strategy = DrfTokenStrategy()
        token_key = strategy.create_access_token(request)

        # Verify a token was created
        self.assertIsNotNone(token_key)
        self.assertTrue(Token.objects.filter(user=user).exists())

        # Verify the returned key matches the created token
        token = Token.objects.get(user=user)
        self.assertEqual(token_key, token.key)
        assert token_key is not None
        assert Token.objects.filter(user=user).exists()
        assert token_key == Token.objects.get(user=user).key

    def test_create_access_token_returns_existing_token(self) -> None:
        """
@@ -226,7 +207,6 @@ class TestDrfTokenStrategy(TestCase):
        THEN:
            - The same token key is returned (no new token created)
        """

        user = User.objects.create_user("testuser")
        existing_token = Token.objects.create(user=user)

@@ -236,11 +216,8 @@ class TestDrfTokenStrategy(TestCase):
        strategy = DrfTokenStrategy()
        token_key = strategy.create_access_token(request)

        # Verify the existing token key is returned
        self.assertEqual(token_key, existing_token.key)

        # Verify only one token exists (no duplicate created)
        self.assertEqual(Token.objects.filter(user=user).count(), 1)
        assert token_key == existing_token.key
        assert Token.objects.filter(user=user).count() == 1

    def test_create_access_token_returns_none_for_unauthenticated_user(self) -> None:
        """
@@ -251,12 +228,11 @@ class TestDrfTokenStrategy(TestCase):
        THEN:
            - None is returned and no token is created
        """

        request = HttpRequest()
        request.user = AnonymousUser()

        strategy = DrfTokenStrategy()
        token_key = strategy.create_access_token(request)

        self.assertIsNone(token_key)
        self.assertEqual(Token.objects.count(), 0)
        assert token_key is None
        assert Token.objects.count() == 0

@@ -1,16 +1,15 @@
import os
from collections.abc import Callable
from dataclasses import dataclass
from pathlib import Path
from unittest import mock

import pytest
from django.core.checks import Error
from django.core.checks import Warning
from django.test import TestCase
from django.test import override_settings
from pytest_django.fixtures import SettingsWrapper
from pytest_mock import MockerFixture

from documents.tests.utils import DirectoriesMixin
from documents.tests.utils import FileSystemAssertsMixin
from paperless.checks import audit_log_check
from paperless.checks import binaries_check
from paperless.checks import check_deprecated_db_settings
@@ -20,54 +19,84 @@ from paperless.checks import paths_check
from paperless.checks import settings_values_check


class TestChecks(DirectoriesMixin, TestCase):
    def test_binaries(self) -> None:
        self.assertEqual(binaries_check(None), [])
@dataclass(frozen=True, slots=True)
class PaperlessTestDirs:
    data_dir: Path
    media_dir: Path
    consumption_dir: Path

    @override_settings(CONVERT_BINARY="uuuhh")
    def test_binaries_fail(self) -> None:
        self.assertEqual(len(binaries_check(None)), 1)

    def test_paths_check(self) -> None:
        self.assertEqual(paths_check(None), [])
# TODO: consolidate with documents/tests/conftest.py PaperlessDirs/paperless_dirs
# once the paperless and documents test suites are ready to share fixtures.
@pytest.fixture()
def directories(tmp_path: Path, settings: SettingsWrapper) -> PaperlessTestDirs:
    data_dir = tmp_path / "data"
    media_dir = tmp_path / "media"
    consumption_dir = tmp_path / "consumption"

    @override_settings(
        MEDIA_ROOT=Path("uuh"),
        DATA_DIR=Path("whatever"),
        CONSUMPTION_DIR=Path("idontcare"),
    for d in (data_dir, media_dir, consumption_dir):
        d.mkdir()

    settings.DATA_DIR = data_dir
    settings.MEDIA_ROOT = media_dir
    settings.CONSUMPTION_DIR = consumption_dir

    return PaperlessTestDirs(
        data_dir=data_dir,
        media_dir=media_dir,
        consumption_dir=consumption_dir,
    )
    def test_paths_check_dont_exist(self) -> None:
        msgs = paths_check(None)
        self.assertEqual(len(msgs), 3, str(msgs))

        for msg in msgs:
            self.assertTrue(msg.msg.endswith("is set but doesn't exist."))

    def test_paths_check_no_access(self) -> None:
        Path(self.dirs.data_dir).chmod(0o000)
        Path(self.dirs.media_dir).chmod(0o000)
        Path(self.dirs.consumption_dir).chmod(0o000)
class TestChecks:
    def test_binaries(self) -> None:
        assert binaries_check(None) == []

        self.addCleanup(os.chmod, self.dirs.data_dir, 0o777)
        self.addCleanup(os.chmod, self.dirs.media_dir, 0o777)
        self.addCleanup(os.chmod, self.dirs.consumption_dir, 0o777)
    def test_binaries_fail(self, settings: SettingsWrapper) -> None:
        settings.CONVERT_BINARY = "uuuhh"
        assert len(binaries_check(None)) == 1

    @pytest.mark.usefixtures("directories")
    def test_paths_check(self) -> None:
        assert paths_check(None) == []

    def test_paths_check_dont_exist(self, settings: SettingsWrapper) -> None:
        settings.MEDIA_ROOT = Path("uuh")
        settings.DATA_DIR = Path("whatever")
        settings.CONSUMPTION_DIR = Path("idontcare")

        msgs = paths_check(None)
        self.assertEqual(len(msgs), 3)

        assert len(msgs) == 3, str(msgs)
        for msg in msgs:
            self.assertTrue(msg.msg.endswith("is not writeable"))
            assert msg.msg.endswith("is set but doesn't exist.")

    @override_settings(DEBUG=False)
    def test_debug_disabled(self) -> None:
        self.assertEqual(debug_mode_check(None), [])
    def test_paths_check_no_access(self, directories: PaperlessTestDirs) -> None:
        directories.data_dir.chmod(0o000)
        directories.media_dir.chmod(0o000)
        directories.consumption_dir.chmod(0o000)

    @override_settings(DEBUG=True)
    def test_debug_enabled(self) -> None:
        self.assertEqual(len(debug_mode_check(None)), 1)
        try:
            msgs = paths_check(None)
        finally:
            directories.data_dir.chmod(0o777)
            directories.media_dir.chmod(0o777)
            directories.consumption_dir.chmod(0o777)

        assert len(msgs) == 3
        for msg in msgs:
            assert msg.msg.endswith("is not writeable")

    def test_debug_disabled(self, settings: SettingsWrapper) -> None:
        settings.DEBUG = False
        assert debug_mode_check(None) == []

    def test_debug_enabled(self, settings: SettingsWrapper) -> None:
        settings.DEBUG = True
        assert len(debug_mode_check(None)) == 1


class TestSettingsChecksAgainstDefaults(DirectoriesMixin, TestCase):
class TestSettingsChecksAgainstDefaults:
    def test_all_valid(self) -> None:
        """
        GIVEN:
@@ -78,104 +107,71 @@ class TestSettingsChecksAgainstDefaults(DirectoriesMixin, TestCase):
            - No system check errors reported
        """
        msgs = settings_values_check(None)
        self.assertEqual(len(msgs), 0)
        assert len(msgs) == 0


class TestOcrSettingsChecks(DirectoriesMixin, TestCase):
    @override_settings(OCR_OUTPUT_TYPE="notapdf")
    def test_invalid_output_type(self) -> None:
class TestOcrSettingsChecks:
    @pytest.mark.parametrize(
        ("setting", "value", "expected_msg"),
        [
            pytest.param(
                "OCR_OUTPUT_TYPE",
                "notapdf",
                'OCR output type "notapdf"',
                id="invalid-output-type",
            ),
            pytest.param(
                "OCR_MODE",
                "makeitso",
                'OCR output mode "makeitso"',
                id="invalid-mode",
            ),
            pytest.param(
                "OCR_MODE",
                "skip_noarchive",
                "deprecated",
                id="deprecated-mode",
            ),
            pytest.param(
                "OCR_SKIP_ARCHIVE_FILE",
                "invalid",
                'OCR_SKIP_ARCHIVE_FILE setting "invalid"',
                id="invalid-skip-archive-file",
            ),
            pytest.param(
                "OCR_CLEAN",
                "cleanme",
                'OCR clean mode "cleanme"',
                id="invalid-clean",
            ),
        ],
    )
    def test_invalid_setting_produces_one_error(
        self,
        settings: SettingsWrapper,
        setting: str,
        value: str,
        expected_msg: str,
    ) -> None:
        """
        GIVEN:
            - Default settings
            - OCR output type is invalid
            - One OCR setting is set to an invalid value
        WHEN:
            - Settings are validated
        THEN:
            - system check error reported for OCR output type
            - Exactly one system check error is reported containing the expected message
        """
        setattr(settings, setting, value)

        msgs = settings_values_check(None)
        self.assertEqual(len(msgs), 1)

        msg = msgs[0]

        self.assertIn('OCR output type "notapdf"', msg.msg)

    @override_settings(OCR_MODE="makeitso")
    def test_invalid_ocr_type(self) -> None:
        """
        GIVEN:
            - Default settings
            - OCR type is invalid
        WHEN:
            - Settings are validated
        THEN:
            - system check error reported for OCR type
        """
        msgs = settings_values_check(None)
        self.assertEqual(len(msgs), 1)

        msg = msgs[0]

        self.assertIn('OCR output mode "makeitso"', msg.msg)

    @override_settings(OCR_MODE="skip_noarchive")
    def test_deprecated_ocr_type(self) -> None:
        """
        GIVEN:
            - Default settings
            - OCR type is deprecated
        WHEN:
            - Settings are validated
        THEN:
            - deprecation warning reported for OCR type
        """
        msgs = settings_values_check(None)
        self.assertEqual(len(msgs), 1)

        msg = msgs[0]

        self.assertIn("deprecated", msg.msg)

    @override_settings(OCR_SKIP_ARCHIVE_FILE="invalid")
    def test_invalid_ocr_skip_archive_file(self) -> None:
        """
        GIVEN:
            - Default settings
            - OCR_SKIP_ARCHIVE_FILE is invalid
        WHEN:
            - Settings are validated
        THEN:
            - system check error reported for OCR_SKIP_ARCHIVE_FILE
        """
        msgs = settings_values_check(None)
        self.assertEqual(len(msgs), 1)

        msg = msgs[0]

        self.assertIn('OCR_SKIP_ARCHIVE_FILE setting "invalid"', msg.msg)

    @override_settings(OCR_CLEAN="cleanme")
    def test_invalid_ocr_clean(self) -> None:
        """
        GIVEN:
            - Default settings
            - OCR cleaning type is invalid
        WHEN:
            - Settings are validated
        THEN:
            - system check error reported for OCR cleaning type
        """
        msgs = settings_values_check(None)
        self.assertEqual(len(msgs), 1)

        msg = msgs[0]

        self.assertIn('OCR clean mode "cleanme"', msg.msg)
        assert len(msgs) == 1
        assert expected_msg in msgs[0].msg


class TestTimezoneSettingsChecks(DirectoriesMixin, TestCase):
    @override_settings(TIME_ZONE="TheMoon\\MyCrater")
    def test_invalid_timezone(self) -> None:
class TestTimezoneSettingsChecks:
    def test_invalid_timezone(self, settings: SettingsWrapper) -> None:
        """
        GIVEN:
            - Default settings
@@ -185,17 +181,16 @@ class TestTimezoneSettingsChecks(DirectoriesMixin, TestCase):
        THEN:
            - system check error reported for timezone
        """
        settings.TIME_ZONE = "TheMoon\\MyCrater"

        msgs = settings_values_check(None)
        self.assertEqual(len(msgs), 1)

        msg = msgs[0]

        self.assertIn('Timezone "TheMoon\\MyCrater"', msg.msg)
        assert len(msgs) == 1
        assert 'Timezone "TheMoon\\MyCrater"' in msgs[0].msg


class TestEmailCertSettingsChecks(DirectoriesMixin, FileSystemAssertsMixin, TestCase):
    @override_settings(EMAIL_CERTIFICATE_FILE=Path("/tmp/not_actually_here.pem"))
    def test_not_valid_file(self) -> None:
class TestEmailCertSettingsChecks:
    def test_not_valid_file(self, settings: SettingsWrapper) -> None:
        """
        GIVEN:
            - Default settings
@@ -205,19 +200,22 @@ class TestEmailCertSettingsChecks(DirectoriesMixin, FileSystemAssertsMixin, Test
        THEN:
            - system check error reported for email certificate
        """
        self.assertIsNotFile("/tmp/not_actually_here.pem")
        cert_path = Path("/tmp/not_actually_here.pem")
        assert not cert_path.is_file()
        settings.EMAIL_CERTIFICATE_FILE = cert_path

        msgs = settings_values_check(None)

        self.assertEqual(len(msgs), 1)

        msg = msgs[0]

        self.assertIn("Email cert /tmp/not_actually_here.pem is not a file", msg.msg)
        assert len(msgs) == 1
        assert "Email cert /tmp/not_actually_here.pem is not a file" in msgs[0].msg


class TestAuditLogChecks(TestCase):
    def test_was_enabled_once(self) -> None:
class TestAuditLogChecks:
    def test_was_enabled_once(
        self,
        settings: SettingsWrapper,
        mocker: MockerFixture,
    ) -> None:
        """
        GIVEN:
            - Audit log is not enabled
@@ -226,23 +224,18 @@ class TestAuditLogChecks(TestCase):
        THEN:
            - system check error reported for disabling audit log
        """
        introspect_mock = mock.MagicMock()
        settings.AUDIT_LOG_ENABLED = False
        introspect_mock = mocker.MagicMock()
        introspect_mock.introspection.table_names.return_value = ["auditlog_logentry"]
        with override_settings(AUDIT_LOG_ENABLED=False):
            with mock.patch.dict(
                "paperless.checks.connections",
                {"default": introspect_mock},
            ):
                msgs = audit_log_check(None)
        mocker.patch.dict(
            "paperless.checks.connections",
            {"default": introspect_mock},
        )

        self.assertEqual(len(msgs), 1)
        msgs = audit_log_check(None)

        msg = msgs[0]

        self.assertIn(
            ("auditlog table was found but audit log is disabled."),
            msg.msg,
        )
        assert len(msgs) == 1
        assert "auditlog table was found but audit log is disabled." in msgs[0].msg


DEPRECATED_VARS: dict[str, str] = {
@@ -271,20 +264,16 @@ class TestDeprecatedDbSettings:
    @pytest.mark.parametrize(
        ("env_var", "db_option_key"),
        [
            ("PAPERLESS_DB_TIMEOUT", "timeout"),
            ("PAPERLESS_DB_POOLSIZE", "pool.min_size / pool.max_size"),
            ("PAPERLESS_DBSSLMODE", "sslmode"),
            ("PAPERLESS_DBSSLROOTCERT", "sslrootcert"),
            ("PAPERLESS_DBSSLCERT", "sslcert"),
            ("PAPERLESS_DBSSLKEY", "sslkey"),
        ],
        ids=[
            "db-timeout",
            "db-poolsize",
            "ssl-mode",
            "ssl-rootcert",
            "ssl-cert",
            "ssl-key",
            pytest.param("PAPERLESS_DB_TIMEOUT", "timeout", id="db-timeout"),
            pytest.param(
                "PAPERLESS_DB_POOLSIZE",
                "pool.min_size / pool.max_size",
                id="db-poolsize",
            ),
            pytest.param("PAPERLESS_DBSSLMODE", "sslmode", id="ssl-mode"),
            pytest.param("PAPERLESS_DBSSLROOTCERT", "sslrootcert", id="ssl-rootcert"),
            pytest.param("PAPERLESS_DBSSLCERT", "sslcert", id="ssl-cert"),
            pytest.param("PAPERLESS_DBSSLKEY", "sslkey", id="ssl-key"),
        ],
    )
    def test_single_deprecated_var_produces_one_warning(
@@ -403,7 +392,10 @@ class TestV3MinimumUpgradeVersionCheck:
    """Test suite for check_v3_minimum_upgrade_version system check."""

    @pytest.fixture
    def build_conn_mock(self, mocker: MockerFixture):
    def build_conn_mock(
        self,
        mocker: MockerFixture,
    ) -> Callable[[list[str], list[str]], mock.MagicMock]:
        """Factory fixture that builds a connections['default'] mock.

        Usage::
@@ -423,7 +415,7 @@ class TestV3MinimumUpgradeVersionCheck:
    def test_no_migrations_table_fresh_install(
        self,
        mocker: MockerFixture,
        build_conn_mock,
        build_conn_mock: Callable[[list[str], list[str]], mock.MagicMock],
    ) -> None:
        """
        GIVEN:
@@ -442,7 +434,7 @@ class TestV3MinimumUpgradeVersionCheck:
    def test_no_documents_migrations_fresh_install(
        self,
        mocker: MockerFixture,
        build_conn_mock,
        build_conn_mock: Callable[[list[str], list[str]], mock.MagicMock],
    ) -> None:
        """
        GIVEN:
@@ -461,7 +453,7 @@ class TestV3MinimumUpgradeVersionCheck:
    def test_v3_state_with_0001_squashed(
        self,
        mocker: MockerFixture,
        build_conn_mock,
        build_conn_mock: Callable[[list[str], list[str]], mock.MagicMock],
    ) -> None:
        """
        GIVEN:
@@ -485,7 +477,7 @@ class TestV3MinimumUpgradeVersionCheck:
    def test_v3_state_with_0002_squashed_only(
        self,
        mocker: MockerFixture,
        build_conn_mock,
        build_conn_mock: Callable[[list[str], list[str]], mock.MagicMock],
    ) -> None:
        """
        GIVEN:
@@ -504,7 +496,7 @@ class TestV3MinimumUpgradeVersionCheck:
    def test_v2_20_9_state_ready_to_upgrade(
        self,
        mocker: MockerFixture,
        build_conn_mock,
        build_conn_mock: Callable[[list[str], list[str]], mock.MagicMock],
    ) -> None:
        """
        GIVEN:
@@ -531,7 +523,7 @@ class TestV3MinimumUpgradeVersionCheck:
    def test_v2_20_8_raises_error(
        self,
        mocker: MockerFixture,
        build_conn_mock,
        build_conn_mock: Callable[[list[str], list[str]], mock.MagicMock],
    ) -> None:
        """
        GIVEN:
@@ -558,7 +550,7 @@ class TestV3MinimumUpgradeVersionCheck:
    def test_very_old_version_raises_error(
        self,
        mocker: MockerFixture,
        build_conn_mock,
        build_conn_mock: Callable[[list[str], list[str]], mock.MagicMock],
    ) -> None:
        """
        GIVEN:
@@ -585,7 +577,7 @@ class TestV3MinimumUpgradeVersionCheck:
    def test_error_hint_mentions_v2_20_9(
        self,
        mocker: MockerFixture,
        build_conn_mock,
        build_conn_mock: Callable[[list[str], list[str]], mock.MagicMock],
    ) -> None:
        """
        GIVEN:

@@ -9,35 +9,50 @@ from paperless.utils import ocr_to_dateparser_languages
@pytest.mark.parametrize(
    ("ocr_language", "expected"),
    [
        # One language
        ("eng", ["en"]),
        # Multiple languages
        ("fra+ita+lao", ["fr", "it", "lo"]),
        # Languages that don't have a two-letter equivalent
        ("fil", ["fil"]),
        # Languages with a script part supported by dateparser
        ("aze_cyrl+srp_latn", ["az-Cyrl", "sr-Latn"]),
        # Languages with a script part not supported by dateparser
        # In this case, default to the language without script
        ("deu_frak", ["de"]),
        # Traditional and simplified chinese don't have the same name in dateparser,
        # so they're converted to the general chinese language
        ("chi_tra+chi_sim", ["zh"]),
        # If a language is not supported by dateparser, fallback to the supported ones
        ("eng+unsupported_language+por", ["en", "pt"]),
        # If no language is supported, fallback to default
        ("unsupported1+unsupported2", []),
        # Duplicate languages, should not duplicate in result
        ("eng+eng", ["en"]),
        # Language with script, but script is not mapped
        ("ita_unknownscript", ["it"]),
        pytest.param("eng", ["en"], id="single-language"),
        pytest.param("fra+ita+lao", ["fr", "it", "lo"], id="multiple-languages"),
        pytest.param("fil", ["fil"], id="no-two-letter-equivalent"),
        pytest.param(
            "aze_cyrl+srp_latn",
            ["az-Cyrl", "sr-Latn"],
            id="script-supported-by-dateparser",
        ),
        pytest.param(
            "deu_frak",
            ["de"],
            id="script-not-supported-falls-back-to-language",
        ),
        pytest.param(
            "chi_tra+chi_sim",
            ["zh"],
            id="chinese-variants-collapse-to-general",
        ),
        pytest.param(
            "eng+unsupported_language+por",
            ["en", "pt"],
            id="unsupported-language-skipped",
        ),
        pytest.param(
            "unsupported1+unsupported2",
            [],
            id="all-unsupported-returns-empty",
        ),
        pytest.param("eng+eng", ["en"], id="duplicates-deduplicated"),
        pytest.param(
            "ita_unknownscript",
            ["it"],
            id="unknown-script-falls-back-to-language",
        ),
    ],
)
def test_ocr_to_dateparser_languages(ocr_language, expected):
def test_ocr_to_dateparser_languages(ocr_language: str, expected: list[str]) -> None:
    assert sorted(ocr_to_dateparser_languages(ocr_language)) == sorted(expected)


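Restating two of the parametrized cases above as plain assertions (same data as the table; sorted because the test does not assert output order):

from paperless.utils import ocr_to_dateparser_languages

# Same expectations as the parametrized cases above.
assert sorted(ocr_to_dateparser_languages("fra+ita+lao")) == ["fr", "it", "lo"]
assert sorted(ocr_to_dateparser_languages("eng+eng")) == ["en"]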
def test_ocr_to_dateparser_languages_exception(monkeypatch, caplog):
def test_ocr_to_dateparser_languages_exception(
    monkeypatch: pytest.MonkeyPatch,
    caplog: pytest.LogCaptureFixture,
) -> None:
    # Patch LocaleDataLoader.get_locale_map to raise an exception
    class DummyLoader:
        def get_locale_map(self, locales=None):

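The hunk cuts off before the patch is applied; a hypothetical completion of the pattern it begins. The patch target and the no-raise behavior are assumptions, not what the project's test necessarily does:

import pytest

from paperless.utils import ocr_to_dateparser_languages


class DummyLoader:
    def get_locale_map(self, locales=None):
        raise RuntimeError("simulated dateparser failure")  # hypothetical error


def test_exception_sketch(
    monkeypatch: pytest.MonkeyPatch,
    caplog: pytest.LogCaptureFixture,
) -> None:
    # Hypothetical patch target; the real test patches wherever
    # paperless.utils resolves the loader.
    monkeypatch.setattr(
        "dateparser.languages.loader.LocaleDataLoader",
        DummyLoader,
    )
    result = ocr_to_dateparser_languages("eng")
    assert isinstance(result, list)  # assumed: errors are logged, not raised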
@@ -1,24 +1,31 @@
import tempfile
from pathlib import Path

from django.test import override_settings
from django.test import Client
from pytest_django.fixtures import SettingsWrapper


def test_favicon_view(client):
    with tempfile.TemporaryDirectory() as tmpdir:
        static_dir = Path(tmpdir)
        favicon_path = static_dir / "paperless" / "img" / "favicon.ico"
        favicon_path.parent.mkdir(parents=True, exist_ok=True)
        favicon_path.write_bytes(b"FAKE ICON DATA")
def test_favicon_view(
    client: Client,
    tmp_path: Path,
    settings: SettingsWrapper,
) -> None:
    favicon_path = tmp_path / "paperless" / "img" / "favicon.ico"
    favicon_path.parent.mkdir(parents=True)
    favicon_path.write_bytes(b"FAKE ICON DATA")

    with override_settings(STATIC_ROOT=static_dir):
        response = client.get("/favicon.ico")
        assert response.status_code == 200
        assert response["Content-Type"] == "image/x-icon"
        assert b"".join(response.streaming_content) == b"FAKE ICON DATA"
    settings.STATIC_ROOT = tmp_path

    response = client.get("/favicon.ico")
    assert response.status_code == 200
    assert response["Content-Type"] == "image/x-icon"
    assert b"".join(response.streaming_content) == b"FAKE ICON DATA"


def test_favicon_view_missing_file(client):
    with override_settings(STATIC_ROOT=Path(tempfile.mkdtemp())):
        response = client.get("/favicon.ico")
        assert response.status_code == 404
def test_favicon_view_missing_file(
    client: Client,
    tmp_path: Path,
    settings: SettingsWrapper,
) -> None:
    settings.STATIC_ROOT = tmp_path
    response = client.get("/favicon.ico")
    assert response.status_code == 404
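A note on the joined body check in both versions of the test: Django serves the favicon as a streaming response, so the body arrives in chunks rather than as a single `.content` attribute. A minimal illustration, assuming Django's FileResponse:

from django.http import FileResponse


def read_body(response: FileResponse) -> bytes:
    # Streaming responses have no `.content`; the body must be
    # drained from the streaming_content iterator.
    return b"".join(response.streaming_content)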