mirror of
https://github.com/paperless-ngx/paperless-ngx.git
synced 2026-03-03 15:56:24 +00:00
Compare commits
3 Commits
chore/lock
...
feature-py
| Author | SHA1 | Date | |
|---|---|---|---|
|
|
98d5d665f0 | ||
|
|
3fa9e75fa0 | ||
|
|
c94b6ce792 |
@@ -39,6 +39,3 @@ max_line_length = off
|
|||||||
|
|
||||||
[Dockerfile*]
|
[Dockerfile*]
|
||||||
indent_style = space
|
indent_style = space
|
||||||
|
|
||||||
[*.toml]
|
|
||||||
indent_style = space
|
|
||||||
|
|||||||
7
.github/workflows/ci-backend.yml
vendored
7
.github/workflows/ci-backend.yml
vendored
@@ -22,7 +22,6 @@ on:
|
|||||||
concurrency:
|
concurrency:
|
||||||
group: backend-${{ github.event.pull_request.number || github.ref }}
|
group: backend-${{ github.event.pull_request.number || github.ref }}
|
||||||
cancel-in-progress: true
|
cancel-in-progress: true
|
||||||
permissions: {}
|
|
||||||
env:
|
env:
|
||||||
DEFAULT_UV_VERSION: "0.10.x"
|
DEFAULT_UV_VERSION: "0.10.x"
|
||||||
NLTK_DATA: "/usr/share/nltk_data"
|
NLTK_DATA: "/usr/share/nltk_data"
|
||||||
@@ -30,11 +29,9 @@ jobs:
|
|||||||
test:
|
test:
|
||||||
name: "Python ${{ matrix.python-version }}"
|
name: "Python ${{ matrix.python-version }}"
|
||||||
runs-on: ubuntu-24.04
|
runs-on: ubuntu-24.04
|
||||||
permissions:
|
|
||||||
contents: read
|
|
||||||
strategy:
|
strategy:
|
||||||
matrix:
|
matrix:
|
||||||
python-version: ['3.10', '3.11', '3.12']
|
python-version: ['3.11', '3.12', '3.13', '3.14']
|
||||||
fail-fast: false
|
fail-fast: false
|
||||||
steps:
|
steps:
|
||||||
- name: Checkout
|
- name: Checkout
|
||||||
@@ -105,8 +102,6 @@ jobs:
|
|||||||
typing:
|
typing:
|
||||||
name: Check project typing
|
name: Check project typing
|
||||||
runs-on: ubuntu-24.04
|
runs-on: ubuntu-24.04
|
||||||
permissions:
|
|
||||||
contents: read
|
|
||||||
env:
|
env:
|
||||||
DEFAULT_PYTHON: "3.12"
|
DEFAULT_PYTHON: "3.12"
|
||||||
steps:
|
steps:
|
||||||
|
|||||||
1
.github/workflows/ci-docker.yml
vendored
1
.github/workflows/ci-docker.yml
vendored
@@ -15,7 +15,6 @@ on:
|
|||||||
concurrency:
|
concurrency:
|
||||||
group: docker-${{ github.event.pull_request.number || github.ref }}
|
group: docker-${{ github.event.pull_request.number || github.ref }}
|
||||||
cancel-in-progress: true
|
cancel-in-progress: true
|
||||||
permissions: {}
|
|
||||||
env:
|
env:
|
||||||
REGISTRY: ghcr.io
|
REGISTRY: ghcr.io
|
||||||
jobs:
|
jobs:
|
||||||
|
|||||||
11
.github/workflows/ci-docs.yml
vendored
11
.github/workflows/ci-docs.yml
vendored
@@ -21,7 +21,10 @@ on:
|
|||||||
concurrency:
|
concurrency:
|
||||||
group: docs-${{ github.event.pull_request.number || github.ref }}
|
group: docs-${{ github.event.pull_request.number || github.ref }}
|
||||||
cancel-in-progress: true
|
cancel-in-progress: true
|
||||||
permissions: {}
|
permissions:
|
||||||
|
contents: read
|
||||||
|
pages: write
|
||||||
|
id-token: write
|
||||||
env:
|
env:
|
||||||
DEFAULT_UV_VERSION: "0.10.x"
|
DEFAULT_UV_VERSION: "0.10.x"
|
||||||
DEFAULT_PYTHON_VERSION: "3.12"
|
DEFAULT_PYTHON_VERSION: "3.12"
|
||||||
@@ -29,8 +32,6 @@ jobs:
|
|||||||
build:
|
build:
|
||||||
name: Build Documentation
|
name: Build Documentation
|
||||||
runs-on: ubuntu-24.04
|
runs-on: ubuntu-24.04
|
||||||
permissions:
|
|
||||||
contents: read
|
|
||||||
steps:
|
steps:
|
||||||
- uses: actions/configure-pages@v5
|
- uses: actions/configure-pages@v5
|
||||||
- name: Checkout
|
- name: Checkout
|
||||||
@@ -66,10 +67,6 @@ jobs:
|
|||||||
needs: build
|
needs: build
|
||||||
if: github.event_name == 'push' && github.ref == 'refs/heads/main'
|
if: github.event_name == 'push' && github.ref == 'refs/heads/main'
|
||||||
runs-on: ubuntu-24.04
|
runs-on: ubuntu-24.04
|
||||||
permissions:
|
|
||||||
contents: read
|
|
||||||
pages: write
|
|
||||||
id-token: write
|
|
||||||
environment:
|
environment:
|
||||||
name: github-pages
|
name: github-pages
|
||||||
url: ${{ steps.deployment.outputs.page_url }}
|
url: ${{ steps.deployment.outputs.page_url }}
|
||||||
|
|||||||
11
.github/workflows/ci-frontend.yml
vendored
11
.github/workflows/ci-frontend.yml
vendored
@@ -16,13 +16,10 @@ on:
|
|||||||
concurrency:
|
concurrency:
|
||||||
group: frontend-${{ github.event.pull_request.number || github.ref }}
|
group: frontend-${{ github.event.pull_request.number || github.ref }}
|
||||||
cancel-in-progress: true
|
cancel-in-progress: true
|
||||||
permissions: {}
|
|
||||||
jobs:
|
jobs:
|
||||||
install-dependencies:
|
install-dependencies:
|
||||||
name: Install Dependencies
|
name: Install Dependencies
|
||||||
runs-on: ubuntu-24.04
|
runs-on: ubuntu-24.04
|
||||||
permissions:
|
|
||||||
contents: read
|
|
||||||
steps:
|
steps:
|
||||||
- name: Checkout
|
- name: Checkout
|
||||||
uses: actions/checkout@v6
|
uses: actions/checkout@v6
|
||||||
@@ -50,8 +47,6 @@ jobs:
|
|||||||
name: Lint
|
name: Lint
|
||||||
needs: install-dependencies
|
needs: install-dependencies
|
||||||
runs-on: ubuntu-24.04
|
runs-on: ubuntu-24.04
|
||||||
permissions:
|
|
||||||
contents: read
|
|
||||||
steps:
|
steps:
|
||||||
- name: Checkout
|
- name: Checkout
|
||||||
uses: actions/checkout@v6
|
uses: actions/checkout@v6
|
||||||
@@ -80,8 +75,6 @@ jobs:
|
|||||||
name: "Unit Tests (${{ matrix.shard-index }}/${{ matrix.shard-count }})"
|
name: "Unit Tests (${{ matrix.shard-index }}/${{ matrix.shard-count }})"
|
||||||
needs: install-dependencies
|
needs: install-dependencies
|
||||||
runs-on: ubuntu-24.04
|
runs-on: ubuntu-24.04
|
||||||
permissions:
|
|
||||||
contents: read
|
|
||||||
strategy:
|
strategy:
|
||||||
fail-fast: false
|
fail-fast: false
|
||||||
matrix:
|
matrix:
|
||||||
@@ -128,8 +121,6 @@ jobs:
|
|||||||
name: "E2E Tests (${{ matrix.shard-index }}/${{ matrix.shard-count }})"
|
name: "E2E Tests (${{ matrix.shard-index }}/${{ matrix.shard-count }})"
|
||||||
needs: install-dependencies
|
needs: install-dependencies
|
||||||
runs-on: ubuntu-24.04
|
runs-on: ubuntu-24.04
|
||||||
permissions:
|
|
||||||
contents: read
|
|
||||||
container: mcr.microsoft.com/playwright:v1.58.2-noble
|
container: mcr.microsoft.com/playwright:v1.58.2-noble
|
||||||
env:
|
env:
|
||||||
PLAYWRIGHT_BROWSERS_PATH: /ms-playwright
|
PLAYWRIGHT_BROWSERS_PATH: /ms-playwright
|
||||||
@@ -170,8 +161,6 @@ jobs:
|
|||||||
name: Bundle Analysis
|
name: Bundle Analysis
|
||||||
needs: [unit-tests, e2e-tests]
|
needs: [unit-tests, e2e-tests]
|
||||||
runs-on: ubuntu-24.04
|
runs-on: ubuntu-24.04
|
||||||
permissions:
|
|
||||||
contents: read
|
|
||||||
steps:
|
steps:
|
||||||
- name: Checkout
|
- name: Checkout
|
||||||
uses: actions/checkout@v6
|
uses: actions/checkout@v6
|
||||||
|
|||||||
3
.github/workflows/ci-lint.yml
vendored
3
.github/workflows/ci-lint.yml
vendored
@@ -9,13 +9,10 @@ on:
|
|||||||
concurrency:
|
concurrency:
|
||||||
group: lint-${{ github.event.pull_request.number || github.ref }}
|
group: lint-${{ github.event.pull_request.number || github.ref }}
|
||||||
cancel-in-progress: true
|
cancel-in-progress: true
|
||||||
permissions: {}
|
|
||||||
jobs:
|
jobs:
|
||||||
lint:
|
lint:
|
||||||
name: Linting via prek
|
name: Linting via prek
|
||||||
runs-on: ubuntu-slim
|
runs-on: ubuntu-slim
|
||||||
permissions:
|
|
||||||
contents: read
|
|
||||||
steps:
|
steps:
|
||||||
- name: Checkout
|
- name: Checkout
|
||||||
uses: actions/checkout@v6.0.2
|
uses: actions/checkout@v6.0.2
|
||||||
|
|||||||
16
.github/workflows/ci-release.yml
vendored
16
.github/workflows/ci-release.yml
vendored
@@ -7,7 +7,6 @@ on:
|
|||||||
concurrency:
|
concurrency:
|
||||||
group: release-${{ github.ref }}
|
group: release-${{ github.ref }}
|
||||||
cancel-in-progress: false
|
cancel-in-progress: false
|
||||||
permissions: {}
|
|
||||||
env:
|
env:
|
||||||
DEFAULT_UV_VERSION: "0.10.x"
|
DEFAULT_UV_VERSION: "0.10.x"
|
||||||
DEFAULT_PYTHON_VERSION: "3.12"
|
DEFAULT_PYTHON_VERSION: "3.12"
|
||||||
@@ -15,10 +14,6 @@ jobs:
|
|||||||
wait-for-docker:
|
wait-for-docker:
|
||||||
name: Wait for Docker Build
|
name: Wait for Docker Build
|
||||||
runs-on: ubuntu-24.04
|
runs-on: ubuntu-24.04
|
||||||
permissions:
|
|
||||||
# lewagon/wait-on-check-action reads workflow check runs
|
|
||||||
actions: read
|
|
||||||
contents: read
|
|
||||||
steps:
|
steps:
|
||||||
- name: Wait for Docker build
|
- name: Wait for Docker build
|
||||||
uses: lewagon/wait-on-check-action@v1.5.0
|
uses: lewagon/wait-on-check-action@v1.5.0
|
||||||
@@ -31,8 +26,6 @@ jobs:
|
|||||||
name: Build Release
|
name: Build Release
|
||||||
needs: wait-for-docker
|
needs: wait-for-docker
|
||||||
runs-on: ubuntu-24.04
|
runs-on: ubuntu-24.04
|
||||||
permissions:
|
|
||||||
contents: read
|
|
||||||
steps:
|
steps:
|
||||||
- name: Checkout
|
- name: Checkout
|
||||||
uses: actions/checkout@v6
|
uses: actions/checkout@v6
|
||||||
@@ -134,10 +127,6 @@ jobs:
|
|||||||
name: Publish Release
|
name: Publish Release
|
||||||
needs: build-release
|
needs: build-release
|
||||||
runs-on: ubuntu-24.04
|
runs-on: ubuntu-24.04
|
||||||
permissions:
|
|
||||||
# release-drafter reads PRs to build the changelog and creates/publishes the release
|
|
||||||
contents: write
|
|
||||||
pull-requests: read
|
|
||||||
outputs:
|
outputs:
|
||||||
prerelease: ${{ steps.get-version.outputs.prerelease }}
|
prerelease: ${{ steps.get-version.outputs.prerelease }}
|
||||||
changelog: ${{ steps.create-release.outputs.body }}
|
changelog: ${{ steps.create-release.outputs.body }}
|
||||||
@@ -185,11 +174,6 @@ jobs:
|
|||||||
needs: publish-release
|
needs: publish-release
|
||||||
if: needs.publish-release.outputs.prerelease == 'false'
|
if: needs.publish-release.outputs.prerelease == 'false'
|
||||||
runs-on: ubuntu-24.04
|
runs-on: ubuntu-24.04
|
||||||
permissions:
|
|
||||||
# git push of the changelog branch requires contents: write
|
|
||||||
# github.rest.pulls.create() and github.rest.issues.addLabels() require pull-requests: write
|
|
||||||
contents: write
|
|
||||||
pull-requests: write
|
|
||||||
steps:
|
steps:
|
||||||
- name: Checkout
|
- name: Checkout
|
||||||
uses: actions/checkout@v6
|
uses: actions/checkout@v6
|
||||||
|
|||||||
1
.github/workflows/cleanup-tags.yml
vendored
1
.github/workflows/cleanup-tags.yml
vendored
@@ -12,7 +12,6 @@ on:
|
|||||||
concurrency:
|
concurrency:
|
||||||
group: registry-tags-cleanup
|
group: registry-tags-cleanup
|
||||||
cancel-in-progress: false
|
cancel-in-progress: false
|
||||||
permissions: {}
|
|
||||||
jobs:
|
jobs:
|
||||||
cleanup-images:
|
cleanup-images:
|
||||||
name: Cleanup Image Tags for ${{ matrix.primary-name }}
|
name: Cleanup Image Tags for ${{ matrix.primary-name }}
|
||||||
|
|||||||
1
.github/workflows/codeql-analysis.yml
vendored
1
.github/workflows/codeql-analysis.yml
vendored
@@ -18,7 +18,6 @@ on:
|
|||||||
branches: [dev]
|
branches: [dev]
|
||||||
schedule:
|
schedule:
|
||||||
- cron: '28 13 * * 5'
|
- cron: '28 13 * * 5'
|
||||||
permissions: {}
|
|
||||||
jobs:
|
jobs:
|
||||||
analyze:
|
analyze:
|
||||||
name: Analyze
|
name: Analyze
|
||||||
|
|||||||
5
.github/workflows/crowdin.yml
vendored
5
.github/workflows/crowdin.yml
vendored
@@ -6,16 +6,11 @@ on:
|
|||||||
push:
|
push:
|
||||||
paths: ['src/locale/**', 'src-ui/messages.xlf', 'src-ui/src/locale/**']
|
paths: ['src/locale/**', 'src-ui/messages.xlf', 'src-ui/src/locale/**']
|
||||||
branches: [dev]
|
branches: [dev]
|
||||||
permissions: {}
|
|
||||||
jobs:
|
jobs:
|
||||||
synchronize-with-crowdin:
|
synchronize-with-crowdin:
|
||||||
name: Crowdin Sync
|
name: Crowdin Sync
|
||||||
if: github.repository_owner == 'paperless-ngx'
|
if: github.repository_owner == 'paperless-ngx'
|
||||||
runs-on: ubuntu-24.04
|
runs-on: ubuntu-24.04
|
||||||
permissions:
|
|
||||||
# Crowdin action pushes translation branches and creates/updates PRs via GITHUB_TOKEN
|
|
||||||
contents: write
|
|
||||||
pull-requests: write
|
|
||||||
steps:
|
steps:
|
||||||
- name: Checkout
|
- name: Checkout
|
||||||
uses: actions/checkout@v6
|
uses: actions/checkout@v6
|
||||||
|
|||||||
8
.github/workflows/pr-bot.yml
vendored
8
.github/workflows/pr-bot.yml
vendored
@@ -2,15 +2,13 @@ name: PR Bot
|
|||||||
on:
|
on:
|
||||||
pull_request_target:
|
pull_request_target:
|
||||||
types: [opened]
|
types: [opened]
|
||||||
permissions: {}
|
permissions:
|
||||||
|
contents: read
|
||||||
|
pull-requests: write
|
||||||
jobs:
|
jobs:
|
||||||
pr-bot:
|
pr-bot:
|
||||||
name: Automated PR Bot
|
name: Automated PR Bot
|
||||||
runs-on: ubuntu-latest
|
runs-on: ubuntu-latest
|
||||||
permissions:
|
|
||||||
# labeler reads file paths; all steps add labels or post comments on PRs
|
|
||||||
contents: read
|
|
||||||
pull-requests: write
|
|
||||||
steps:
|
steps:
|
||||||
- name: Label PR by file path or branch name
|
- name: Label PR by file path or branch name
|
||||||
# see .github/labeler.yml for the labeler config
|
# see .github/labeler.yml for the labeler config
|
||||||
|
|||||||
5
.github/workflows/project-actions.yml
vendored
5
.github/workflows/project-actions.yml
vendored
@@ -7,14 +7,13 @@ on:
|
|||||||
branches:
|
branches:
|
||||||
- main
|
- main
|
||||||
- dev
|
- dev
|
||||||
permissions: {}
|
permissions:
|
||||||
|
contents: read
|
||||||
jobs:
|
jobs:
|
||||||
pr_opened_or_reopened:
|
pr_opened_or_reopened:
|
||||||
name: pr_opened_or_reopened
|
name: pr_opened_or_reopened
|
||||||
runs-on: ubuntu-24.04
|
runs-on: ubuntu-24.04
|
||||||
permissions:
|
permissions:
|
||||||
# release-drafter reads its config file from the repo
|
|
||||||
contents: read
|
|
||||||
# write permission is required for autolabeler
|
# write permission is required for autolabeler
|
||||||
pull-requests: write
|
pull-requests: write
|
||||||
if: github.event_name == 'pull_request_target' && (github.event.action == 'opened' || github.event.action == 'reopened') && github.event.pull_request.user.login != 'dependabot'
|
if: github.event_name == 'pull_request_target' && (github.event.action == 'opened' || github.event.action == 'reopened') && github.event.pull_request.user.login != 'dependabot'
|
||||||
|
|||||||
18
.github/workflows/repo-maintenance.yml
vendored
18
.github/workflows/repo-maintenance.yml
vendored
@@ -3,7 +3,10 @@ on:
|
|||||||
schedule:
|
schedule:
|
||||||
- cron: '0 3 * * *'
|
- cron: '0 3 * * *'
|
||||||
workflow_dispatch:
|
workflow_dispatch:
|
||||||
permissions: {}
|
permissions:
|
||||||
|
issues: write
|
||||||
|
pull-requests: write
|
||||||
|
discussions: write
|
||||||
concurrency:
|
concurrency:
|
||||||
group: lock
|
group: lock
|
||||||
jobs:
|
jobs:
|
||||||
@@ -11,9 +14,6 @@ jobs:
|
|||||||
name: 'Stale'
|
name: 'Stale'
|
||||||
if: github.repository_owner == 'paperless-ngx'
|
if: github.repository_owner == 'paperless-ngx'
|
||||||
runs-on: ubuntu-24.04
|
runs-on: ubuntu-24.04
|
||||||
permissions:
|
|
||||||
issues: write
|
|
||||||
pull-requests: write
|
|
||||||
steps:
|
steps:
|
||||||
- uses: actions/stale@v10
|
- uses: actions/stale@v10
|
||||||
with:
|
with:
|
||||||
@@ -36,10 +36,6 @@ jobs:
|
|||||||
name: 'Lock Old Threads'
|
name: 'Lock Old Threads'
|
||||||
if: github.repository_owner == 'paperless-ngx'
|
if: github.repository_owner == 'paperless-ngx'
|
||||||
runs-on: ubuntu-24.04
|
runs-on: ubuntu-24.04
|
||||||
permissions:
|
|
||||||
issues: write
|
|
||||||
pull-requests: write
|
|
||||||
discussions: write
|
|
||||||
steps:
|
steps:
|
||||||
- uses: dessant/lock-threads@v6
|
- uses: dessant/lock-threads@v6
|
||||||
with:
|
with:
|
||||||
@@ -60,8 +56,6 @@ jobs:
|
|||||||
name: 'Close Answered Discussions'
|
name: 'Close Answered Discussions'
|
||||||
if: github.repository_owner == 'paperless-ngx'
|
if: github.repository_owner == 'paperless-ngx'
|
||||||
runs-on: ubuntu-24.04
|
runs-on: ubuntu-24.04
|
||||||
permissions:
|
|
||||||
discussions: write
|
|
||||||
steps:
|
steps:
|
||||||
- uses: actions/github-script@v8
|
- uses: actions/github-script@v8
|
||||||
with:
|
with:
|
||||||
@@ -119,8 +113,6 @@ jobs:
|
|||||||
name: 'Close Outdated Discussions'
|
name: 'Close Outdated Discussions'
|
||||||
if: github.repository_owner == 'paperless-ngx'
|
if: github.repository_owner == 'paperless-ngx'
|
||||||
runs-on: ubuntu-24.04
|
runs-on: ubuntu-24.04
|
||||||
permissions:
|
|
||||||
discussions: write
|
|
||||||
steps:
|
steps:
|
||||||
- uses: actions/github-script@v8
|
- uses: actions/github-script@v8
|
||||||
with:
|
with:
|
||||||
@@ -213,8 +205,6 @@ jobs:
|
|||||||
name: 'Close Unsupported Feature Requests'
|
name: 'Close Unsupported Feature Requests'
|
||||||
if: github.repository_owner == 'paperless-ngx'
|
if: github.repository_owner == 'paperless-ngx'
|
||||||
runs-on: ubuntu-24.04
|
runs-on: ubuntu-24.04
|
||||||
permissions:
|
|
||||||
discussions: write
|
|
||||||
steps:
|
steps:
|
||||||
- uses: actions/github-script@v8
|
- uses: actions/github-script@v8
|
||||||
with:
|
with:
|
||||||
|
|||||||
1
.github/workflows/translate-strings.yml
vendored
1
.github/workflows/translate-strings.yml
vendored
@@ -3,7 +3,6 @@ on:
|
|||||||
push:
|
push:
|
||||||
branches:
|
branches:
|
||||||
- dev
|
- dev
|
||||||
permissions: {}
|
|
||||||
jobs:
|
jobs:
|
||||||
generate-translate-strings:
|
generate-translate-strings:
|
||||||
name: Generate Translation Strings
|
name: Generate Translation Strings
|
||||||
|
|||||||
@@ -62,10 +62,6 @@ copies you created in the steps above.
|
|||||||
|
|
||||||
## Updating Paperless {#updating}
|
## Updating Paperless {#updating}
|
||||||
|
|
||||||
!!! warning
|
|
||||||
|
|
||||||
Please review the [migration instructions](migration-v3.md) before upgrading Paperless-ngx to v3.0, it includes some breaking changes that require manual intervention before upgrading.
|
|
||||||
|
|
||||||
### Docker Route {#docker-updating}
|
### Docker Route {#docker-updating}
|
||||||
|
|
||||||
If a new release of paperless-ngx is available, upgrading depends on how
|
If a new release of paperless-ngx is available, upgrading depends on how
|
||||||
|
|||||||
@@ -1,37 +1,9 @@
|
|||||||
# Changelog
|
# Changelog
|
||||||
|
|
||||||
## paperless-ngx 2.20.9
|
|
||||||
|
|
||||||
### Security
|
|
||||||
|
|
||||||
- Resolve [GHSA-386h-chg4-cfw9](https://github.com/paperless-ngx/paperless-ngx/security/advisories/GHSA-386h-chg4-cfw9)
|
|
||||||
|
|
||||||
### Bug Fixes
|
|
||||||
|
|
||||||
- Fixhancement: config option reset [@shamoon](https://github.com/shamoon) ([#12176](https://github.com/paperless-ngx/paperless-ngx/pull/12176))
|
|
||||||
- Fix: correct page count by separating display vs collection sizes for tags [@shamoon](https://github.com/shamoon) ([#12170](https://github.com/paperless-ngx/paperless-ngx/pull/12170))
|
|
||||||
|
|
||||||
### All App Changes
|
|
||||||
|
|
||||||
<details>
|
|
||||||
<summary>2 changes</summary>
|
|
||||||
|
|
||||||
- Fixhancement: config option reset [@shamoon](https://github.com/shamoon) ([#12176](https://github.com/paperless-ngx/paperless-ngx/pull/12176))
|
|
||||||
- Fix: correct page count by separating display vs collection sizes for tags [@shamoon](https://github.com/shamoon) ([#12170](https://github.com/paperless-ngx/paperless-ngx/pull/12170))
|
|
||||||
</details>
|
|
||||||
|
|
||||||
## paperless-ngx 2.20.8
|
## paperless-ngx 2.20.8
|
||||||
|
|
||||||
### Security
|
|
||||||
|
|
||||||
- Resolve [GHSA-7qqc-wrcw-2fj9](https://github.com/paperless-ngx/paperless-ngx/security/advisories/GHSA-7qqc-wrcw-2fj9)
|
|
||||||
|
|
||||||
## paperless-ngx 2.20.7
|
## paperless-ngx 2.20.7
|
||||||
|
|
||||||
### Security
|
|
||||||
|
|
||||||
- Resolve [GHSA-x395-6h48-wr8v](https://github.com/paperless-ngx/paperless-ngx/security/advisories/GHSA-x395-6h48-wr8v)
|
|
||||||
|
|
||||||
### Bug Fixes
|
### Bug Fixes
|
||||||
|
|
||||||
- Performance fix: use subqueries to improve object retrieval in large installs [@shamoon](https://github.com/shamoon) ([#11950](https://github.com/paperless-ngx/paperless-ngx/pull/11950))
|
- Performance fix: use subqueries to improve object retrieval in large installs [@shamoon](https://github.com/shamoon) ([#11950](https://github.com/paperless-ngx/paperless-ngx/pull/11950))
|
||||||
@@ -50,10 +22,6 @@
|
|||||||
|
|
||||||
## paperless-ngx 2.20.6
|
## paperless-ngx 2.20.6
|
||||||
|
|
||||||
### Security
|
|
||||||
|
|
||||||
- Resolve [GHSA-jqwv-hx7q-fxh3](https://github.com/paperless-ngx/paperless-ngx/security/advisories/GHSA-jqwv-hx7q-fxh3) and [GHSA-w47q-3m69-84v8](https://github.com/paperless-ngx/paperless-ngx/security/advisories/GHSA-w47q-3m69-84v8)
|
|
||||||
|
|
||||||
### Bug Fixes
|
### Bug Fixes
|
||||||
|
|
||||||
- Fix: extract all ids for nested tags [@shamoon](https://github.com/shamoon) ([#11888](https://github.com/paperless-ngx/paperless-ngx/pull/11888))
|
- Fix: extract all ids for nested tags [@shamoon](https://github.com/shamoon) ([#11888](https://github.com/paperless-ngx/paperless-ngx/pull/11888))
|
||||||
|
|||||||
@@ -51,172 +51,137 @@ matcher.
|
|||||||
### Database
|
### Database
|
||||||
|
|
||||||
By default, Paperless uses **SQLite** with a database stored at `data/db.sqlite3`.
|
By default, Paperless uses **SQLite** with a database stored at `data/db.sqlite3`.
|
||||||
For multi-user or higher-throughput deployments, **PostgreSQL** (recommended) or
|
To switch to **PostgreSQL** or **MariaDB**, set [`PAPERLESS_DBHOST`](#PAPERLESS_DBHOST) and optionally configure other
|
||||||
**MariaDB** can be used instead by setting [`PAPERLESS_DBENGINE`](#PAPERLESS_DBENGINE)
|
database-related environment variables.
|
||||||
and the relevant connection variables.
|
|
||||||
|
|
||||||
#### [`PAPERLESS_DBENGINE=<engine>`](#PAPERLESS_DBENGINE) {#PAPERLESS_DBENGINE}
|
|
||||||
|
|
||||||
: Specifies the database engine to use. Accepted values are `sqlite`, `postgresql`,
|
|
||||||
and `mariadb`.
|
|
||||||
|
|
||||||
Defaults to `sqlite` if not set.
|
|
||||||
|
|
||||||
PostgreSQL and MariaDB both require [`PAPERLESS_DBHOST`](#PAPERLESS_DBHOST) to be
|
|
||||||
set. SQLite does not use any other connection variables; the database file is always
|
|
||||||
located at `<PAPERLESS_DATA_DIR>/db.sqlite3`.
|
|
||||||
|
|
||||||
!!! warning
|
|
||||||
Using MariaDB comes with some caveats.
|
|
||||||
See [MySQL Caveats](advanced_usage.md#mysql-caveats).
|
|
||||||
|
|
||||||
#### [`PAPERLESS_DBHOST=<hostname>`](#PAPERLESS_DBHOST) {#PAPERLESS_DBHOST}
|
#### [`PAPERLESS_DBHOST=<hostname>`](#PAPERLESS_DBHOST) {#PAPERLESS_DBHOST}
|
||||||
|
|
||||||
: Hostname of the PostgreSQL or MariaDB database server. Required when
|
: If unset, Paperless uses **SQLite** by default.
|
||||||
`PAPERLESS_DBENGINE` is `postgresql` or `mariadb`.
|
|
||||||
|
Set `PAPERLESS_DBHOST` to switch to PostgreSQL or MariaDB instead.
|
||||||
|
|
||||||
|
#### [`PAPERLESS_DBENGINE=<engine_name>`](#PAPERLESS_DBENGINE) {#PAPERLESS_DBENGINE}
|
||||||
|
|
||||||
|
: Optional. Specifies the database engine to use when connecting to a remote database.
|
||||||
|
Available options are `postgresql` and `mariadb`.
|
||||||
|
|
||||||
|
Defaults to `postgresql` if `PAPERLESS_DBHOST` is set.
|
||||||
|
|
||||||
|
!!! warning
|
||||||
|
|
||||||
|
Using MariaDB comes with some caveats. See [MySQL Caveats](advanced_usage.md#mysql-caveats).
|
||||||
|
|
||||||
#### [`PAPERLESS_DBPORT=<port>`](#PAPERLESS_DBPORT) {#PAPERLESS_DBPORT}
|
#### [`PAPERLESS_DBPORT=<port>`](#PAPERLESS_DBPORT) {#PAPERLESS_DBPORT}
|
||||||
|
|
||||||
: Port to use when connecting to PostgreSQL or MariaDB.
|
: Port to use when connecting to PostgreSQL or MariaDB.
|
||||||
|
|
||||||
Defaults to `5432` for PostgreSQL and `3306` for MariaDB.
|
Default is `5432` for PostgreSQL and `3306` for MariaDB.
|
||||||
|
|
||||||
#### [`PAPERLESS_DBNAME=<name>`](#PAPERLESS_DBNAME) {#PAPERLESS_DBNAME}
|
#### [`PAPERLESS_DBNAME=<name>`](#PAPERLESS_DBNAME) {#PAPERLESS_DBNAME}
|
||||||
|
|
||||||
: Name of the PostgreSQL or MariaDB database to connect to.
|
: Name of the database to connect to when using PostgreSQL or MariaDB.
|
||||||
|
|
||||||
Defaults to `paperless`.
|
Defaults to "paperless".
|
||||||
|
|
||||||
#### [`PAPERLESS_DBUSER=<user>`](#PAPERLESS_DBUSER) {#PAPERLESS_DBUSER}
|
#### [`PAPERLESS_DBUSER=<name>`](#PAPERLESS_DBUSER) {#PAPERLESS_DBUSER}
|
||||||
|
|
||||||
: Username for authenticating with the PostgreSQL or MariaDB database.
|
: Username for authenticating with the PostgreSQL or MariaDB database.
|
||||||
|
|
||||||
Defaults to `paperless`.
|
Defaults to "paperless".
|
||||||
|
|
||||||
#### [`PAPERLESS_DBPASS=<password>`](#PAPERLESS_DBPASS) {#PAPERLESS_DBPASS}
|
#### [`PAPERLESS_DBPASS=<password>`](#PAPERLESS_DBPASS) {#PAPERLESS_DBPASS}
|
||||||
|
|
||||||
: Password for the PostgreSQL or MariaDB database user.
|
: Password for the PostgreSQL or MariaDB database user.
|
||||||
|
|
||||||
Defaults to `paperless`.
|
Defaults to "paperless".
|
||||||
|
|
||||||
#### [`PAPERLESS_DB_OPTIONS=<options>`](#PAPERLESS_DB_OPTIONS) {#PAPERLESS_DB_OPTIONS}
|
#### [`PAPERLESS_DBSSLMODE=<mode>`](#PAPERLESS_DBSSLMODE) {#PAPERLESS_DBSSLMODE}
|
||||||
|
|
||||||
: Advanced database connection options as a semicolon-delimited key-value string.
|
: SSL mode to use when connecting to PostgreSQL or MariaDB.
|
||||||
Keys and values are separated by `=`. Dot-notation produces nested option
|
|
||||||
dictionaries; for example, `pool.max_size=20` sets
|
|
||||||
`OPTIONS["pool"]["max_size"] = 20`.
|
|
||||||
|
|
||||||
Options specified here are merged over the engine defaults. Unrecognised keys
|
See [the official documentation about
|
||||||
are passed through to the underlying database driver without validation, so a
|
sslmode for PostgreSQL](https://www.postgresql.org/docs/current/libpq-ssl.html).
|
||||||
typo will be silently ignored rather than producing an error.
|
|
||||||
|
|
||||||
Refer to your database driver's documentation for the full set of accepted keys:
|
See [the official documentation about
|
||||||
|
sslmode for MySQL and MariaDB](https://dev.mysql.com/doc/refman/8.0/en/connection-options.html#option_general_ssl-mode).
|
||||||
|
|
||||||
- PostgreSQL: [libpq connection parameters](https://www.postgresql.org/docs/current/libpq-connect.html#LIBPQ-PARAMKEYWORDS)
|
*Note*: SSL mode values differ between PostgreSQL and MariaDB.
|
||||||
- MariaDB: [MariaDB Connector/Python](https://mariadb.com/kb/en/mariadb-connector-python/)
|
|
||||||
- SQLite: [SQLite PRAGMA statements](https://www.sqlite.org/pragma.html)
|
|
||||||
|
|
||||||
!!! note "PostgreSQL connection pooling"
|
Default is `prefer` for PostgreSQL and `PREFERRED` for MariaDB.
|
||||||
|
|
||||||
Pool size is controlled via `pool.min_size` and `pool.max_size`. When
|
#### [`PAPERLESS_DBSSLROOTCERT=<ca-path>`](#PAPERLESS_DBSSLROOTCERT) {#PAPERLESS_DBSSLROOTCERT}
|
||||||
configuring pooling, ensure your PostgreSQL `max_connections` is large enough
|
|
||||||
to handle all pool connections across all workers:
|
|
||||||
`(web_workers + celery_workers) * pool.max_size + safety_margin`.
|
|
||||||
|
|
||||||
**Examples:**
|
: Path to the SSL root certificate used to verify the database server.
|
||||||
|
|
||||||
```bash title="PostgreSQL: require SSL, set a custom CA certificate, and limit the pool size"
|
See [the official documentation about
|
||||||
PAPERLESS_DB_OPTIONS="sslmode=require;sslrootcert=/certs/ca.pem;pool.max_size=5"
|
sslmode for PostgreSQL](https://www.postgresql.org/docs/current/libpq-ssl.html).
|
||||||
```
|
Changes the location of `root.crt`.
|
||||||
|
|
||||||
```bash title="MariaDB: require SSL with a custom CA certificate"
|
See [the official documentation about
|
||||||
PAPERLESS_DB_OPTIONS="ssl_mode=REQUIRED;ssl.ca=/certs/ca.pem"
|
sslmode for MySQL and MariaDB](https://dev.mysql.com/doc/refman/8.0/en/connection-options.html#option_general_ssl-ca).
|
||||||
```
|
|
||||||
|
|
||||||
```bash title="SQLite: set a busy timeout of 30 seconds"
|
Defaults to unset, using the standard location in the home directory.
|
||||||
# PostgreSQL: set a connection timeout
|
|
||||||
PAPERLESS_DB_OPTIONS="connect_timeout=10"
|
|
||||||
```
|
|
||||||
|
|
||||||
#### ~~[`PAPERLESS_DBSSLMODE`](#PAPERLESS_DBSSLMODE)~~ {#PAPERLESS_DBSSLMODE}
|
#### [`PAPERLESS_DBSSLCERT=<client-cert-path>`](#PAPERLESS_DBSSLCERT) {#PAPERLESS_DBSSLCERT}
|
||||||
|
|
||||||
!!! failure "Removed in v3"
|
: Path to the client SSL certificate used when connecting securely.
|
||||||
|
|
||||||
Use [`PAPERLESS_DB_OPTIONS`](#PAPERLESS_DB_OPTIONS) instead.
|
See [the official documentation about
|
||||||
|
sslmode for PostgreSQL](https://www.postgresql.org/docs/current/libpq-ssl.html).
|
||||||
|
|
||||||
```bash title="PostgreSQL"
|
See [the official documentation about
|
||||||
PAPERLESS_DB_OPTIONS="sslmode=require"
|
sslmode for MySQL and MariaDB](https://dev.mysql.com/doc/refman/8.0/en/connection-options.html#option_general_ssl-cert).
|
||||||
```
|
|
||||||
|
|
||||||
```bash title="MariaDB"
|
Changes the location of `postgresql.crt`.
|
||||||
PAPERLESS_DB_OPTIONS="ssl_mode=REQUIRED"
|
|
||||||
```
|
|
||||||
|
|
||||||
#### ~~[`PAPERLESS_DBSSLROOTCERT`](#PAPERLESS_DBSSLROOTCERT)~~ {#PAPERLESS_DBSSLROOTCERT}
|
Defaults to unset, using the standard location in the home directory.
|
||||||
|
|
||||||
!!! failure "Removed in v3"
|
#### [`PAPERLESS_DBSSLKEY=<client-cert-key>`](#PAPERLESS_DBSSLKEY) {#PAPERLESS_DBSSLKEY}
|
||||||
|
|
||||||
Use [`PAPERLESS_DB_OPTIONS`](#PAPERLESS_DB_OPTIONS) instead.
|
: Path to the client SSL private key used when connecting securely.
|
||||||
|
|
||||||
```bash title="PostgreSQL"
|
See [the official documentation about
|
||||||
PAPERLESS_DB_OPTIONS="sslrootcert=/path/to/ca.pem"
|
sslmode for PostgreSQL](https://www.postgresql.org/docs/current/libpq-ssl.html).
|
||||||
```
|
|
||||||
|
|
||||||
```bash title="MariaDB"
|
See [the official documentation about
|
||||||
PAPERLESS_DB_OPTIONS="ssl.ca=/path/to/ca.pem"
|
sslmode for MySQL and MariaDB](https://dev.mysql.com/doc/refman/8.0/en/connection-options.html#option_general_ssl-key).
|
||||||
```
|
|
||||||
|
|
||||||
#### ~~[`PAPERLESS_DBSSLCERT`](#PAPERLESS_DBSSLCERT)~~ {#PAPERLESS_DBSSLCERT}
|
Changes the location of `postgresql.key`.
|
||||||
|
|
||||||
!!! failure "Removed in v3"
|
Defaults to unset, using the standard location in the home directory.
|
||||||
|
|
||||||
Use [`PAPERLESS_DB_OPTIONS`](#PAPERLESS_DB_OPTIONS) instead.
|
#### [`PAPERLESS_DB_TIMEOUT=<int>`](#PAPERLESS_DB_TIMEOUT) {#PAPERLESS_DB_TIMEOUT}
|
||||||
|
|
||||||
```bash title="PostgreSQL"
|
: Sets how long a database connection should wait before timing out.
|
||||||
PAPERLESS_DB_OPTIONS="sslcert=/path/to/client.crt"
|
|
||||||
```
|
|
||||||
|
|
||||||
```bash title="MariaDB"
|
For SQLite, this sets how long to wait if the database is locked.
|
||||||
PAPERLESS_DB_OPTIONS="ssl.cert=/path/to/client.crt"
|
For PostgreSQL or MariaDB, this sets the connection timeout.
|
||||||
```
|
|
||||||
|
|
||||||
#### ~~[`PAPERLESS_DBSSLKEY`](#PAPERLESS_DBSSLKEY)~~ {#PAPERLESS_DBSSLKEY}
|
Defaults to unset, which uses Django’s built-in defaults.
|
||||||
|
|
||||||
!!! failure "Removed in v3"
|
#### [`PAPERLESS_DB_POOLSIZE=<int>`](#PAPERLESS_DB_POOLSIZE) {#PAPERLESS_DB_POOLSIZE}
|
||||||
|
|
||||||
Use [`PAPERLESS_DB_OPTIONS`](#PAPERLESS_DB_OPTIONS) instead.
|
: Defines the maximum number of database connections to keep in the pool.
|
||||||
|
|
||||||
```bash title="PostgreSQL"
|
Only applies to PostgreSQL. This setting is ignored for other database engines.
|
||||||
PAPERLESS_DB_OPTIONS="sslkey=/path/to/client.key"
|
|
||||||
```
|
|
||||||
|
|
||||||
```bash title="MariaDB"
|
The value must be greater than or equal to 1 to be used.
|
||||||
PAPERLESS_DB_OPTIONS="ssl.key=/path/to/client.key"
|
Defaults to unset, which disables connection pooling.
|
||||||
```
|
|
||||||
|
|
||||||
#### ~~[`PAPERLESS_DB_TIMEOUT`](#PAPERLESS_DB_TIMEOUT)~~ {#PAPERLESS_DB_TIMEOUT}
|
!!! note
|
||||||
|
|
||||||
!!! failure "Removed in v3"
|
A pool of 8-10 connections per worker is typically sufficient.
|
||||||
|
If you encounter error messages such as `couldn't get a connection`
|
||||||
|
or database connection timeouts, you probably need to increase the pool size.
|
||||||
|
|
||||||
Use [`PAPERLESS_DB_OPTIONS`](#PAPERLESS_DB_OPTIONS) instead.
|
!!! warning
|
||||||
|
Make sure your PostgreSQL `max_connections` setting is large enough to handle the connection pools:
|
||||||
|
`(NB_PAPERLESS_WORKERS + NB_CELERY_WORKERS) × POOL_SIZE + SAFETY_MARGIN`. For example, with
|
||||||
|
4 Paperless workers and 2 Celery workers, and a pool size of 8:``(4 + 2) × 8 + 10 = 58`,
|
||||||
|
so `max_connections = 60` (or even more) is appropriate.
|
||||||
|
|
||||||
```bash title="SQLite"
|
This assumes only Paperless-ngx connects to your PostgreSQL instance. If you have other applications,
|
||||||
PAPERLESS_DB_OPTIONS="timeout=30"
|
you should increase `max_connections` accordingly.
|
||||||
```
|
|
||||||
|
|
||||||
```bash title="PostgreSQL or MariaDB"
|
|
||||||
PAPERLESS_DB_OPTIONS="connect_timeout=30"
|
|
||||||
```
|
|
||||||
|
|
||||||
#### ~~[`PAPERLESS_DB_POOLSIZE`](#PAPERLESS_DB_POOLSIZE)~~ {#PAPERLESS_DB_POOLSIZE}
|
|
||||||
|
|
||||||
!!! failure "Removed in v3"
|
|
||||||
|
|
||||||
Use [`PAPERLESS_DB_OPTIONS`](#PAPERLESS_DB_OPTIONS) instead.
|
|
||||||
|
|
||||||
```bash
|
|
||||||
PAPERLESS_DB_OPTIONS="pool.max_size=10"
|
|
||||||
```
|
|
||||||
|
|
||||||
#### [`PAPERLESS_DB_READ_CACHE_ENABLED=<bool>`](#PAPERLESS_DB_READ_CACHE_ENABLED) {#PAPERLESS_DB_READ_CACHE_ENABLED}
|
#### [`PAPERLESS_DB_READ_CACHE_ENABLED=<bool>`](#PAPERLESS_DB_READ_CACHE_ENABLED) {#PAPERLESS_DB_READ_CACHE_ENABLED}
|
||||||
|
|
||||||
|
|||||||
@@ -48,58 +48,3 @@ The `CONSUMER_BARCODE_SCANNER` setting has been removed. zxing-cpp is now the on
|
|||||||
reliability.
|
reliability.
|
||||||
- The `libzbar0` / `libzbar-dev` system packages are no longer required and can be removed from any custom Docker
|
- The `libzbar0` / `libzbar-dev` system packages are no longer required and can be removed from any custom Docker
|
||||||
images or host installations.
|
images or host installations.
|
||||||
|
|
||||||
## Database Engine
|
|
||||||
|
|
||||||
`PAPERLESS_DBENGINE` is now required to use PostgreSQL or MariaDB. Previously, the
|
|
||||||
engine was inferred from the presence of `PAPERLESS_DBHOST`, with `PAPERLESS_DBENGINE`
|
|
||||||
only needed to select MariaDB over PostgreSQL.
|
|
||||||
|
|
||||||
SQLite users require no changes, though they may explicitly set their engine if desired.
|
|
||||||
|
|
||||||
#### Action Required
|
|
||||||
|
|
||||||
PostgreSQL and MariaDB users must add `PAPERLESS_DBENGINE` to their environment:
|
|
||||||
|
|
||||||
```yaml
|
|
||||||
# v2 (PostgreSQL inferred from PAPERLESS_DBHOST)
|
|
||||||
PAPERLESS_DBHOST: postgres
|
|
||||||
|
|
||||||
# v3 (engine must be explicit)
|
|
||||||
PAPERLESS_DBENGINE: postgresql
|
|
||||||
PAPERLESS_DBHOST: postgres
|
|
||||||
```
|
|
||||||
|
|
||||||
See [`PAPERLESS_DBENGINE`](configuration.md#PAPERLESS_DBENGINE) for accepted values.
|
|
||||||
|
|
||||||
## Database Advanced Options
|
|
||||||
|
|
||||||
The individual SSL, timeout, and pooling variables have been removed in favor of a
|
|
||||||
single [`PAPERLESS_DB_OPTIONS`](configuration.md#PAPERLESS_DB_OPTIONS) string. This
|
|
||||||
consolidates a growing set of engine-specific variables into one place, and allows
|
|
||||||
any option supported by the underlying database driver to be set without requiring a
|
|
||||||
dedicated environment variable for each.
|
|
||||||
|
|
||||||
The removed variables and their replacements are:
|
|
||||||
|
|
||||||
| Removed Variable | Replacement in `PAPERLESS_DB_OPTIONS` |
|
|
||||||
| ------------------------- | ---------------------------------------------------------------------------- |
|
|
||||||
| `PAPERLESS_DBSSLMODE` | `sslmode=<value>` (PostgreSQL) or `ssl_mode=<value>` (MariaDB) |
|
|
||||||
| `PAPERLESS_DBSSLROOTCERT` | `sslrootcert=<path>` (PostgreSQL) or `ssl.ca=<path>` (MariaDB) |
|
|
||||||
| `PAPERLESS_DBSSLCERT` | `sslcert=<path>` (PostgreSQL) or `ssl.cert=<path>` (MariaDB) |
|
|
||||||
| `PAPERLESS_DBSSLKEY` | `sslkey=<path>` (PostgreSQL) or `ssl.key=<path>` (MariaDB) |
|
|
||||||
| `PAPERLESS_DB_POOLSIZE` | `pool.max_size=<value>` (PostgreSQL only) |
|
|
||||||
| `PAPERLESS_DB_TIMEOUT` | `timeout=<value>` (SQLite) or `connect_timeout=<value>` (PostgreSQL/MariaDB) |
|
|
||||||
|
|
||||||
The deprecated variables will continue to function for now but will be removed in a
|
|
||||||
future release. A deprecation warning is logged at startup for each deprecated variable
|
|
||||||
that is still set.
|
|
||||||
|
|
||||||
#### Action Required
|
|
||||||
|
|
||||||
Users with any of the deprecated variables set should migrate to `PAPERLESS_DB_OPTIONS`.
|
|
||||||
Multiple options are combined in a single value:
|
|
||||||
|
|
||||||
```bash
|
|
||||||
PAPERLESS_DB_OPTIONS="sslmode=require;sslrootcert=/certs/ca.pem;pool.max_size=10"
|
|
||||||
```
|
|
||||||
@@ -504,8 +504,9 @@ installation. Keep these points in mind:
|
|||||||
- Read the [changelog](changelog.md) and
|
- Read the [changelog](changelog.md) and
|
||||||
take note of breaking changes.
|
take note of breaking changes.
|
||||||
- Decide whether to stay on SQLite or migrate to PostgreSQL.
|
- Decide whether to stay on SQLite or migrate to PostgreSQL.
|
||||||
Both work fine with Paperless-ngx.
|
See [documentation](#sqlite_to_psql) for details on moving data
|
||||||
However, if you already have a database server running
|
from SQLite to PostgreSQL. Both work fine with
|
||||||
|
Paperless. However, if you already have a database server running
|
||||||
for other services, you might as well use it for Paperless as well.
|
for other services, you might as well use it for Paperless as well.
|
||||||
- The task scheduler of Paperless, which is used to execute periodic
|
- The task scheduler of Paperless, which is used to execute periodic
|
||||||
tasks such as email checking and maintenance, requires a
|
tasks such as email checking and maintenance, requires a
|
||||||
|
|||||||
@@ -626,7 +626,7 @@ The following placeholders are only available for "added" or "updated" triggers
|
|||||||
- `{{created_year_short}}`: created year
|
- `{{created_year_short}}`: created year
|
||||||
- `{{created_month}}`: created month
|
- `{{created_month}}`: created month
|
||||||
- `{{created_month_name}}`: created month name
|
- `{{created_month_name}}`: created month name
|
||||||
- `{{created_month_name_short}}`: created month short name
|
- `{created_month_name_short}}`: created month short name
|
||||||
- `{{created_day}}`: created day
|
- `{{created_day}}`: created day
|
||||||
- `{{created_time}}`: created time in HH:MM format
|
- `{{created_time}}`: created time in HH:MM format
|
||||||
- `{{doc_url}}`: URL to the document in the web UI. Requires the `PAPERLESS_URL` setting to be set.
|
- `{{doc_url}}`: URL to the document in the web UI. Requires the `PAPERLESS_URL` setting to be set.
|
||||||
|
|||||||
@@ -1,12 +1,11 @@
|
|||||||
[project]
|
[project]
|
||||||
name = "paperless-ngx"
|
name = "paperless-ngx"
|
||||||
version = "2.20.9"
|
version = "2.20.8"
|
||||||
description = "A community-supported supercharged document management system: scan, index and archive all your physical documents"
|
description = "A community-supported supercharged document management system: scan, index and archive all your physical documents"
|
||||||
readme = "README.md"
|
readme = "README.md"
|
||||||
requires-python = ">=3.10"
|
requires-python = ">=3.11"
|
||||||
classifiers = [
|
classifiers = [
|
||||||
"Programming Language :: Python :: 3 :: Only",
|
"Programming Language :: Python :: 3 :: Only",
|
||||||
"Programming Language :: Python :: 3.10",
|
|
||||||
"Programming Language :: Python :: 3.11",
|
"Programming Language :: Python :: 3.11",
|
||||||
"Programming Language :: Python :: 3.12",
|
"Programming Language :: Python :: 3.12",
|
||||||
"Programming Language :: Python :: 3.13",
|
"Programming Language :: Python :: 3.13",
|
||||||
@@ -37,7 +36,6 @@ dependencies = [
|
|||||||
"django-filter~=25.1",
|
"django-filter~=25.1",
|
||||||
"django-guardian~=3.3.0",
|
"django-guardian~=3.3.0",
|
||||||
"django-multiselectfield~=1.0.1",
|
"django-multiselectfield~=1.0.1",
|
||||||
"django-rich~=2.2.0",
|
|
||||||
"django-soft-delete~=1.0.18",
|
"django-soft-delete~=1.0.18",
|
||||||
"django-treenode>=0.23.2",
|
"django-treenode>=0.23.2",
|
||||||
"djangorestframework~=3.16",
|
"djangorestframework~=3.16",
|
||||||
@@ -77,6 +75,7 @@ dependencies = [
|
|||||||
"setproctitle~=1.3.4",
|
"setproctitle~=1.3.4",
|
||||||
"tika-client~=0.10.0",
|
"tika-client~=0.10.0",
|
||||||
"torch~=2.10.0",
|
"torch~=2.10.0",
|
||||||
|
"tqdm~=4.67.1",
|
||||||
"watchfiles>=1.1.1",
|
"watchfiles>=1.1.1",
|
||||||
"whitenoise~=6.11",
|
"whitenoise~=6.11",
|
||||||
"whoosh-reloaded>=2.7.5",
|
"whoosh-reloaded>=2.7.5",
|
||||||
@@ -149,6 +148,7 @@ typing = [
|
|||||||
"types-pytz",
|
"types-pytz",
|
||||||
"types-redis",
|
"types-redis",
|
||||||
"types-setuptools",
|
"types-setuptools",
|
||||||
|
"types-tqdm",
|
||||||
]
|
]
|
||||||
|
|
||||||
[tool.uv]
|
[tool.uv]
|
||||||
@@ -176,7 +176,7 @@ torch = [
|
|||||||
]
|
]
|
||||||
|
|
||||||
[tool.ruff]
|
[tool.ruff]
|
||||||
target-version = "py310"
|
target-version = "py311"
|
||||||
line-length = 88
|
line-length = 88
|
||||||
src = [
|
src = [
|
||||||
"src",
|
"src",
|
||||||
@@ -303,7 +303,6 @@ markers = [
|
|||||||
"tika: Tests requiring Tika service",
|
"tika: Tests requiring Tika service",
|
||||||
"greenmail: Tests requiring Greenmail service",
|
"greenmail: Tests requiring Greenmail service",
|
||||||
"date_parsing: Tests which cover date parsing from content or filename",
|
"date_parsing: Tests which cover date parsing from content or filename",
|
||||||
"management: Tests which cover management commands/functionality",
|
|
||||||
]
|
]
|
||||||
|
|
||||||
[tool.pytest_env]
|
[tool.pytest_env]
|
||||||
|
|||||||
@@ -5,14 +5,14 @@
|
|||||||
<trans-unit id="ngb.alert.close" datatype="html">
|
<trans-unit id="ngb.alert.close" datatype="html">
|
||||||
<source>Close</source>
|
<source>Close</source>
|
||||||
<context-group purpose="location">
|
<context-group purpose="location">
|
||||||
<context context-type="sourcefile">node_modules/.pnpm/@ng-bootstrap+ng-bootstrap@20.0.0_@angular+common@21.2.0_@angular+core@21.2.0_@angular+_fdecb2f5429dfeda6301fd300107de5b/node_modules/src/alert/alert.ts</context>
|
<context context-type="sourcefile">node_modules/.pnpm/@ng-bootstrap+ng-bootstrap@20.0.0_@angular+common@21.1.3_@angular+core@21.1.3_@angular+_1ede04b1f6b65fa8e34a28e44afe1de9/node_modules/src/alert/alert.ts</context>
|
||||||
<context context-type="linenumber">50</context>
|
<context context-type="linenumber">50</context>
|
||||||
</context-group>
|
</context-group>
|
||||||
</trans-unit>
|
</trans-unit>
|
||||||
<trans-unit id="ngb.carousel.slide-number" datatype="html">
|
<trans-unit id="ngb.carousel.slide-number" datatype="html">
|
||||||
<source> Slide <x id="INTERPOLATION" equiv-text="ueryList<NgbSli"/> of <x id="INTERPOLATION_1" equiv-text="EventSource = N"/> </source>
|
<source> Slide <x id="INTERPOLATION" equiv-text="ueryList<NgbSli"/> of <x id="INTERPOLATION_1" equiv-text="EventSource = N"/> </source>
|
||||||
<context-group purpose="location">
|
<context-group purpose="location">
|
||||||
<context context-type="sourcefile">node_modules/.pnpm/@ng-bootstrap+ng-bootstrap@20.0.0_@angular+common@21.2.0_@angular+core@21.2.0_@angular+_fdecb2f5429dfeda6301fd300107de5b/node_modules/src/carousel/carousel.ts</context>
|
<context context-type="sourcefile">node_modules/.pnpm/@ng-bootstrap+ng-bootstrap@20.0.0_@angular+common@21.1.3_@angular+core@21.1.3_@angular+_1ede04b1f6b65fa8e34a28e44afe1de9/node_modules/src/carousel/carousel.ts</context>
|
||||||
<context context-type="linenumber">131,135</context>
|
<context context-type="linenumber">131,135</context>
|
||||||
</context-group>
|
</context-group>
|
||||||
<note priority="1" from="description">Currently selected slide number read by screen reader</note>
|
<note priority="1" from="description">Currently selected slide number read by screen reader</note>
|
||||||
@@ -20,114 +20,114 @@
|
|||||||
<trans-unit id="ngb.carousel.previous" datatype="html">
|
<trans-unit id="ngb.carousel.previous" datatype="html">
|
||||||
<source>Previous</source>
|
<source>Previous</source>
|
||||||
<context-group purpose="location">
|
<context-group purpose="location">
|
||||||
<context context-type="sourcefile">node_modules/.pnpm/@ng-bootstrap+ng-bootstrap@20.0.0_@angular+common@21.2.0_@angular+core@21.2.0_@angular+_fdecb2f5429dfeda6301fd300107de5b/node_modules/src/carousel/carousel.ts</context>
|
<context context-type="sourcefile">node_modules/.pnpm/@ng-bootstrap+ng-bootstrap@20.0.0_@angular+common@21.1.3_@angular+core@21.1.3_@angular+_1ede04b1f6b65fa8e34a28e44afe1de9/node_modules/src/carousel/carousel.ts</context>
|
||||||
<context context-type="linenumber">159,162</context>
|
<context context-type="linenumber">159,162</context>
|
||||||
</context-group>
|
</context-group>
|
||||||
</trans-unit>
|
</trans-unit>
|
||||||
<trans-unit id="ngb.carousel.next" datatype="html">
|
<trans-unit id="ngb.carousel.next" datatype="html">
|
||||||
<source>Next</source>
|
<source>Next</source>
|
||||||
<context-group purpose="location">
|
<context-group purpose="location">
|
||||||
<context context-type="sourcefile">node_modules/.pnpm/@ng-bootstrap+ng-bootstrap@20.0.0_@angular+common@21.2.0_@angular+core@21.2.0_@angular+_fdecb2f5429dfeda6301fd300107de5b/node_modules/src/carousel/carousel.ts</context>
|
<context context-type="sourcefile">node_modules/.pnpm/@ng-bootstrap+ng-bootstrap@20.0.0_@angular+common@21.1.3_@angular+core@21.1.3_@angular+_1ede04b1f6b65fa8e34a28e44afe1de9/node_modules/src/carousel/carousel.ts</context>
|
||||||
<context context-type="linenumber">202,203</context>
|
<context context-type="linenumber">202,203</context>
|
||||||
</context-group>
|
</context-group>
|
||||||
</trans-unit>
|
</trans-unit>
|
||||||
<trans-unit id="ngb.datepicker.select-month" datatype="html">
|
<trans-unit id="ngb.datepicker.select-month" datatype="html">
|
||||||
<source>Select month</source>
|
<source>Select month</source>
|
||||||
<context-group purpose="location">
|
<context-group purpose="location">
|
||||||
<context context-type="sourcefile">node_modules/.pnpm/@ng-bootstrap+ng-bootstrap@20.0.0_@angular+common@21.2.0_@angular+core@21.2.0_@angular+_fdecb2f5429dfeda6301fd300107de5b/node_modules/src/datepicker/datepicker-navigation-select.ts</context>
|
<context context-type="sourcefile">node_modules/.pnpm/@ng-bootstrap+ng-bootstrap@20.0.0_@angular+common@21.1.3_@angular+core@21.1.3_@angular+_1ede04b1f6b65fa8e34a28e44afe1de9/node_modules/src/datepicker/datepicker-navigation-select.ts</context>
|
||||||
<context context-type="linenumber">91</context>
|
<context context-type="linenumber">91</context>
|
||||||
</context-group>
|
</context-group>
|
||||||
<context-group purpose="location">
|
<context-group purpose="location">
|
||||||
<context context-type="sourcefile">node_modules/.pnpm/@ng-bootstrap+ng-bootstrap@20.0.0_@angular+common@21.2.0_@angular+core@21.2.0_@angular+_fdecb2f5429dfeda6301fd300107de5b/node_modules/src/datepicker/datepicker-navigation-select.ts</context>
|
<context context-type="sourcefile">node_modules/.pnpm/@ng-bootstrap+ng-bootstrap@20.0.0_@angular+common@21.1.3_@angular+core@21.1.3_@angular+_1ede04b1f6b65fa8e34a28e44afe1de9/node_modules/src/datepicker/datepicker-navigation-select.ts</context>
|
||||||
<context context-type="linenumber">91</context>
|
<context context-type="linenumber">91</context>
|
||||||
</context-group>
|
</context-group>
|
||||||
</trans-unit>
|
</trans-unit>
|
||||||
<trans-unit id="ngb.datepicker.select-year" datatype="html">
|
<trans-unit id="ngb.datepicker.select-year" datatype="html">
|
||||||
<source>Select year</source>
|
<source>Select year</source>
|
||||||
<context-group purpose="location">
|
<context-group purpose="location">
|
||||||
<context context-type="sourcefile">node_modules/.pnpm/@ng-bootstrap+ng-bootstrap@20.0.0_@angular+common@21.2.0_@angular+core@21.2.0_@angular+_fdecb2f5429dfeda6301fd300107de5b/node_modules/src/datepicker/datepicker-navigation-select.ts</context>
|
<context context-type="sourcefile">node_modules/.pnpm/@ng-bootstrap+ng-bootstrap@20.0.0_@angular+common@21.1.3_@angular+core@21.1.3_@angular+_1ede04b1f6b65fa8e34a28e44afe1de9/node_modules/src/datepicker/datepicker-navigation-select.ts</context>
|
||||||
<context context-type="linenumber">91</context>
|
<context context-type="linenumber">91</context>
|
||||||
</context-group>
|
</context-group>
|
||||||
<context-group purpose="location">
|
<context-group purpose="location">
|
||||||
<context context-type="sourcefile">node_modules/.pnpm/@ng-bootstrap+ng-bootstrap@20.0.0_@angular+common@21.2.0_@angular+core@21.2.0_@angular+_fdecb2f5429dfeda6301fd300107de5b/node_modules/src/datepicker/datepicker-navigation-select.ts</context>
|
<context context-type="sourcefile">node_modules/.pnpm/@ng-bootstrap+ng-bootstrap@20.0.0_@angular+common@21.1.3_@angular+core@21.1.3_@angular+_1ede04b1f6b65fa8e34a28e44afe1de9/node_modules/src/datepicker/datepicker-navigation-select.ts</context>
|
||||||
<context context-type="linenumber">91</context>
|
<context context-type="linenumber">91</context>
|
||||||
</context-group>
|
</context-group>
|
||||||
</trans-unit>
|
</trans-unit>
|
||||||
<trans-unit id="ngb.datepicker.previous-month" datatype="html">
|
<trans-unit id="ngb.datepicker.previous-month" datatype="html">
|
||||||
<source>Previous month</source>
|
<source>Previous month</source>
|
||||||
<context-group purpose="location">
|
<context-group purpose="location">
|
||||||
<context context-type="sourcefile">node_modules/.pnpm/@ng-bootstrap+ng-bootstrap@20.0.0_@angular+common@21.2.0_@angular+core@21.2.0_@angular+_fdecb2f5429dfeda6301fd300107de5b/node_modules/src/datepicker/datepicker-navigation.ts</context>
|
<context context-type="sourcefile">node_modules/.pnpm/@ng-bootstrap+ng-bootstrap@20.0.0_@angular+common@21.1.3_@angular+core@21.1.3_@angular+_1ede04b1f6b65fa8e34a28e44afe1de9/node_modules/src/datepicker/datepicker-navigation.ts</context>
|
||||||
<context context-type="linenumber">83,85</context>
|
<context context-type="linenumber">83,85</context>
|
||||||
</context-group>
|
</context-group>
|
||||||
<context-group purpose="location">
|
<context-group purpose="location">
|
||||||
<context context-type="sourcefile">node_modules/.pnpm/@ng-bootstrap+ng-bootstrap@20.0.0_@angular+common@21.2.0_@angular+core@21.2.0_@angular+_fdecb2f5429dfeda6301fd300107de5b/node_modules/src/datepicker/datepicker-navigation.ts</context>
|
<context context-type="sourcefile">node_modules/.pnpm/@ng-bootstrap+ng-bootstrap@20.0.0_@angular+common@21.1.3_@angular+core@21.1.3_@angular+_1ede04b1f6b65fa8e34a28e44afe1de9/node_modules/src/datepicker/datepicker-navigation.ts</context>
|
||||||
<context context-type="linenumber">112</context>
|
<context context-type="linenumber">112</context>
|
||||||
</context-group>
|
</context-group>
|
||||||
</trans-unit>
|
</trans-unit>
|
||||||
<trans-unit id="ngb.datepicker.next-month" datatype="html">
|
<trans-unit id="ngb.datepicker.next-month" datatype="html">
|
||||||
<source>Next month</source>
|
<source>Next month</source>
|
||||||
<context-group purpose="location">
|
<context-group purpose="location">
|
||||||
<context context-type="sourcefile">node_modules/.pnpm/@ng-bootstrap+ng-bootstrap@20.0.0_@angular+common@21.2.0_@angular+core@21.2.0_@angular+_fdecb2f5429dfeda6301fd300107de5b/node_modules/src/datepicker/datepicker-navigation.ts</context>
|
<context context-type="sourcefile">node_modules/.pnpm/@ng-bootstrap+ng-bootstrap@20.0.0_@angular+common@21.1.3_@angular+core@21.1.3_@angular+_1ede04b1f6b65fa8e34a28e44afe1de9/node_modules/src/datepicker/datepicker-navigation.ts</context>
|
||||||
<context context-type="linenumber">112</context>
|
<context context-type="linenumber">112</context>
|
||||||
</context-group>
|
</context-group>
|
||||||
<context-group purpose="location">
|
<context-group purpose="location">
|
||||||
<context context-type="sourcefile">node_modules/.pnpm/@ng-bootstrap+ng-bootstrap@20.0.0_@angular+common@21.2.0_@angular+core@21.2.0_@angular+_fdecb2f5429dfeda6301fd300107de5b/node_modules/src/datepicker/datepicker-navigation.ts</context>
|
<context context-type="sourcefile">node_modules/.pnpm/@ng-bootstrap+ng-bootstrap@20.0.0_@angular+common@21.1.3_@angular+core@21.1.3_@angular+_1ede04b1f6b65fa8e34a28e44afe1de9/node_modules/src/datepicker/datepicker-navigation.ts</context>
|
||||||
<context context-type="linenumber">112</context>
|
<context context-type="linenumber">112</context>
|
||||||
</context-group>
|
</context-group>
|
||||||
</trans-unit>
|
</trans-unit>
|
||||||
<trans-unit id="ngb.pagination.first" datatype="html">
|
<trans-unit id="ngb.pagination.first" datatype="html">
|
||||||
<source>««</source>
|
<source>««</source>
|
||||||
<context-group purpose="location">
|
<context-group purpose="location">
|
||||||
<context context-type="sourcefile">node_modules/.pnpm/@ng-bootstrap+ng-bootstrap@20.0.0_@angular+common@21.2.0_@angular+core@21.2.0_@angular+_fdecb2f5429dfeda6301fd300107de5b/node_modules/src/pagination/pagination-config.ts</context>
|
<context context-type="sourcefile">node_modules/.pnpm/@ng-bootstrap+ng-bootstrap@20.0.0_@angular+common@21.1.3_@angular+core@21.1.3_@angular+_1ede04b1f6b65fa8e34a28e44afe1de9/node_modules/src/pagination/pagination-config.ts</context>
|
||||||
<context context-type="linenumber">20</context>
|
<context context-type="linenumber">20</context>
|
||||||
</context-group>
|
</context-group>
|
||||||
</trans-unit>
|
</trans-unit>
|
||||||
<trans-unit id="ngb.pagination.previous" datatype="html">
|
<trans-unit id="ngb.pagination.previous" datatype="html">
|
||||||
<source>«</source>
|
<source>«</source>
|
||||||
<context-group purpose="location">
|
<context-group purpose="location">
|
||||||
<context context-type="sourcefile">node_modules/.pnpm/@ng-bootstrap+ng-bootstrap@20.0.0_@angular+common@21.2.0_@angular+core@21.2.0_@angular+_fdecb2f5429dfeda6301fd300107de5b/node_modules/src/pagination/pagination-config.ts</context>
|
<context context-type="sourcefile">node_modules/.pnpm/@ng-bootstrap+ng-bootstrap@20.0.0_@angular+common@21.1.3_@angular+core@21.1.3_@angular+_1ede04b1f6b65fa8e34a28e44afe1de9/node_modules/src/pagination/pagination-config.ts</context>
|
||||||
<context context-type="linenumber">20</context>
|
<context context-type="linenumber">20</context>
|
||||||
</context-group>
|
</context-group>
|
||||||
</trans-unit>
|
</trans-unit>
|
||||||
<trans-unit id="ngb.pagination.next" datatype="html">
|
<trans-unit id="ngb.pagination.next" datatype="html">
|
||||||
<source>»</source>
|
<source>»</source>
|
||||||
<context-group purpose="location">
|
<context-group purpose="location">
|
||||||
<context context-type="sourcefile">node_modules/.pnpm/@ng-bootstrap+ng-bootstrap@20.0.0_@angular+common@21.2.0_@angular+core@21.2.0_@angular+_fdecb2f5429dfeda6301fd300107de5b/node_modules/src/pagination/pagination-config.ts</context>
|
<context context-type="sourcefile">node_modules/.pnpm/@ng-bootstrap+ng-bootstrap@20.0.0_@angular+common@21.1.3_@angular+core@21.1.3_@angular+_1ede04b1f6b65fa8e34a28e44afe1de9/node_modules/src/pagination/pagination-config.ts</context>
|
||||||
<context context-type="linenumber">20</context>
|
<context context-type="linenumber">20</context>
|
||||||
</context-group>
|
</context-group>
|
||||||
</trans-unit>
|
</trans-unit>
|
||||||
<trans-unit id="ngb.pagination.last" datatype="html">
|
<trans-unit id="ngb.pagination.last" datatype="html">
|
||||||
<source>»»</source>
|
<source>»»</source>
|
||||||
<context-group purpose="location">
|
<context-group purpose="location">
|
||||||
<context context-type="sourcefile">node_modules/.pnpm/@ng-bootstrap+ng-bootstrap@20.0.0_@angular+common@21.2.0_@angular+core@21.2.0_@angular+_fdecb2f5429dfeda6301fd300107de5b/node_modules/src/pagination/pagination-config.ts</context>
|
<context context-type="sourcefile">node_modules/.pnpm/@ng-bootstrap+ng-bootstrap@20.0.0_@angular+common@21.1.3_@angular+core@21.1.3_@angular+_1ede04b1f6b65fa8e34a28e44afe1de9/node_modules/src/pagination/pagination-config.ts</context>
|
||||||
<context context-type="linenumber">20</context>
|
<context context-type="linenumber">20</context>
|
||||||
</context-group>
|
</context-group>
|
||||||
</trans-unit>
|
</trans-unit>
|
||||||
<trans-unit id="ngb.pagination.first-aria" datatype="html">
|
<trans-unit id="ngb.pagination.first-aria" datatype="html">
|
||||||
<source>First</source>
|
<source>First</source>
|
||||||
<context-group purpose="location">
|
<context-group purpose="location">
|
||||||
<context context-type="sourcefile">node_modules/.pnpm/@ng-bootstrap+ng-bootstrap@20.0.0_@angular+common@21.2.0_@angular+core@21.2.0_@angular+_fdecb2f5429dfeda6301fd300107de5b/node_modules/src/pagination/pagination-config.ts</context>
|
<context context-type="sourcefile">node_modules/.pnpm/@ng-bootstrap+ng-bootstrap@20.0.0_@angular+common@21.1.3_@angular+core@21.1.3_@angular+_1ede04b1f6b65fa8e34a28e44afe1de9/node_modules/src/pagination/pagination-config.ts</context>
|
||||||
<context context-type="linenumber">20</context>
|
<context context-type="linenumber">20</context>
|
||||||
</context-group>
|
</context-group>
|
||||||
</trans-unit>
|
</trans-unit>
|
||||||
<trans-unit id="ngb.pagination.previous-aria" datatype="html">
|
<trans-unit id="ngb.pagination.previous-aria" datatype="html">
|
||||||
<source>Previous</source>
|
<source>Previous</source>
|
||||||
<context-group purpose="location">
|
<context-group purpose="location">
|
||||||
<context context-type="sourcefile">node_modules/.pnpm/@ng-bootstrap+ng-bootstrap@20.0.0_@angular+common@21.2.0_@angular+core@21.2.0_@angular+_fdecb2f5429dfeda6301fd300107de5b/node_modules/src/pagination/pagination-config.ts</context>
|
<context context-type="sourcefile">node_modules/.pnpm/@ng-bootstrap+ng-bootstrap@20.0.0_@angular+common@21.1.3_@angular+core@21.1.3_@angular+_1ede04b1f6b65fa8e34a28e44afe1de9/node_modules/src/pagination/pagination-config.ts</context>
|
||||||
<context context-type="linenumber">20</context>
|
<context context-type="linenumber">20</context>
|
||||||
</context-group>
|
</context-group>
|
||||||
</trans-unit>
|
</trans-unit>
|
||||||
<trans-unit id="ngb.pagination.next-aria" datatype="html">
|
<trans-unit id="ngb.pagination.next-aria" datatype="html">
|
||||||
<source>Next</source>
|
<source>Next</source>
|
||||||
<context-group purpose="location">
|
<context-group purpose="location">
|
||||||
<context context-type="sourcefile">node_modules/.pnpm/@ng-bootstrap+ng-bootstrap@20.0.0_@angular+common@21.2.0_@angular+core@21.2.0_@angular+_fdecb2f5429dfeda6301fd300107de5b/node_modules/src/pagination/pagination-config.ts</context>
|
<context context-type="sourcefile">node_modules/.pnpm/@ng-bootstrap+ng-bootstrap@20.0.0_@angular+common@21.1.3_@angular+core@21.1.3_@angular+_1ede04b1f6b65fa8e34a28e44afe1de9/node_modules/src/pagination/pagination-config.ts</context>
|
||||||
<context context-type="linenumber">20</context>
|
<context context-type="linenumber">20</context>
|
||||||
</context-group>
|
</context-group>
|
||||||
</trans-unit>
|
</trans-unit>
|
||||||
<trans-unit id="ngb.pagination.last-aria" datatype="html">
|
<trans-unit id="ngb.pagination.last-aria" datatype="html">
|
||||||
<source>Last</source>
|
<source>Last</source>
|
||||||
<context-group purpose="location">
|
<context-group purpose="location">
|
||||||
<context context-type="sourcefile">node_modules/.pnpm/@ng-bootstrap+ng-bootstrap@20.0.0_@angular+common@21.2.0_@angular+core@21.2.0_@angular+_fdecb2f5429dfeda6301fd300107de5b/node_modules/src/pagination/pagination-config.ts</context>
|
<context context-type="sourcefile">node_modules/.pnpm/@ng-bootstrap+ng-bootstrap@20.0.0_@angular+common@21.1.3_@angular+core@21.1.3_@angular+_1ede04b1f6b65fa8e34a28e44afe1de9/node_modules/src/pagination/pagination-config.ts</context>
|
||||||
<context context-type="linenumber">20</context>
|
<context context-type="linenumber">20</context>
|
||||||
</context-group>
|
</context-group>
|
||||||
</trans-unit>
|
</trans-unit>
|
||||||
@@ -135,105 +135,105 @@
|
|||||||
<source><x id="INTERPOLATION" equiv-text="barConfig);
|
<source><x id="INTERPOLATION" equiv-text="barConfig);
|
||||||
pu"/></source>
|
pu"/></source>
|
||||||
<context-group purpose="location">
|
<context-group purpose="location">
|
||||||
<context context-type="sourcefile">node_modules/.pnpm/@ng-bootstrap+ng-bootstrap@20.0.0_@angular+common@21.2.0_@angular+core@21.2.0_@angular+_fdecb2f5429dfeda6301fd300107de5b/node_modules/src/progressbar/progressbar.ts</context>
|
<context context-type="sourcefile">node_modules/.pnpm/@ng-bootstrap+ng-bootstrap@20.0.0_@angular+common@21.1.3_@angular+core@21.1.3_@angular+_1ede04b1f6b65fa8e34a28e44afe1de9/node_modules/src/progressbar/progressbar.ts</context>
|
||||||
<context context-type="linenumber">41,42</context>
|
<context context-type="linenumber">41,42</context>
|
||||||
</context-group>
|
</context-group>
|
||||||
</trans-unit>
|
</trans-unit>
|
||||||
<trans-unit id="ngb.timepicker.HH" datatype="html">
|
<trans-unit id="ngb.timepicker.HH" datatype="html">
|
||||||
<source>HH</source>
|
<source>HH</source>
|
||||||
<context-group purpose="location">
|
<context-group purpose="location">
|
||||||
<context context-type="sourcefile">node_modules/.pnpm/@ng-bootstrap+ng-bootstrap@20.0.0_@angular+common@21.2.0_@angular+core@21.2.0_@angular+_fdecb2f5429dfeda6301fd300107de5b/node_modules/src/timepicker/timepicker-config.ts</context>
|
<context context-type="sourcefile">node_modules/.pnpm/@ng-bootstrap+ng-bootstrap@20.0.0_@angular+common@21.1.3_@angular+core@21.1.3_@angular+_1ede04b1f6b65fa8e34a28e44afe1de9/node_modules/src/timepicker/timepicker-config.ts</context>
|
||||||
<context context-type="linenumber">21</context>
|
<context context-type="linenumber">21</context>
|
||||||
</context-group>
|
</context-group>
|
||||||
</trans-unit>
|
</trans-unit>
|
||||||
<trans-unit id="ngb.timepicker.hours" datatype="html">
|
<trans-unit id="ngb.timepicker.hours" datatype="html">
|
||||||
<source>Hours</source>
|
<source>Hours</source>
|
||||||
<context-group purpose="location">
|
<context-group purpose="location">
|
||||||
<context context-type="sourcefile">node_modules/.pnpm/@ng-bootstrap+ng-bootstrap@20.0.0_@angular+common@21.2.0_@angular+core@21.2.0_@angular+_fdecb2f5429dfeda6301fd300107de5b/node_modules/src/timepicker/timepicker-config.ts</context>
|
<context context-type="sourcefile">node_modules/.pnpm/@ng-bootstrap+ng-bootstrap@20.0.0_@angular+common@21.1.3_@angular+core@21.1.3_@angular+_1ede04b1f6b65fa8e34a28e44afe1de9/node_modules/src/timepicker/timepicker-config.ts</context>
|
||||||
<context context-type="linenumber">21</context>
|
<context context-type="linenumber">21</context>
|
||||||
</context-group>
|
</context-group>
|
||||||
</trans-unit>
|
</trans-unit>
|
||||||
<trans-unit id="ngb.timepicker.MM" datatype="html">
|
<trans-unit id="ngb.timepicker.MM" datatype="html">
|
||||||
<source>MM</source>
|
<source>MM</source>
|
||||||
<context-group purpose="location">
|
<context-group purpose="location">
|
||||||
<context context-type="sourcefile">node_modules/.pnpm/@ng-bootstrap+ng-bootstrap@20.0.0_@angular+common@21.2.0_@angular+core@21.2.0_@angular+_fdecb2f5429dfeda6301fd300107de5b/node_modules/src/timepicker/timepicker-config.ts</context>
|
<context context-type="sourcefile">node_modules/.pnpm/@ng-bootstrap+ng-bootstrap@20.0.0_@angular+common@21.1.3_@angular+core@21.1.3_@angular+_1ede04b1f6b65fa8e34a28e44afe1de9/node_modules/src/timepicker/timepicker-config.ts</context>
|
||||||
<context context-type="linenumber">21</context>
|
<context context-type="linenumber">21</context>
|
||||||
</context-group>
|
</context-group>
|
||||||
</trans-unit>
|
</trans-unit>
|
||||||
<trans-unit id="ngb.timepicker.minutes" datatype="html">
|
<trans-unit id="ngb.timepicker.minutes" datatype="html">
|
||||||
<source>Minutes</source>
|
<source>Minutes</source>
|
||||||
<context-group purpose="location">
|
<context-group purpose="location">
|
||||||
<context context-type="sourcefile">node_modules/.pnpm/@ng-bootstrap+ng-bootstrap@20.0.0_@angular+common@21.2.0_@angular+core@21.2.0_@angular+_fdecb2f5429dfeda6301fd300107de5b/node_modules/src/timepicker/timepicker-config.ts</context>
|
<context context-type="sourcefile">node_modules/.pnpm/@ng-bootstrap+ng-bootstrap@20.0.0_@angular+common@21.1.3_@angular+core@21.1.3_@angular+_1ede04b1f6b65fa8e34a28e44afe1de9/node_modules/src/timepicker/timepicker-config.ts</context>
|
||||||
<context context-type="linenumber">21</context>
|
<context context-type="linenumber">21</context>
|
||||||
</context-group>
|
</context-group>
|
||||||
</trans-unit>
|
</trans-unit>
|
||||||
<trans-unit id="ngb.timepicker.increment-hours" datatype="html">
|
<trans-unit id="ngb.timepicker.increment-hours" datatype="html">
|
||||||
<source>Increment hours</source>
|
<source>Increment hours</source>
|
||||||
<context-group purpose="location">
|
<context-group purpose="location">
|
||||||
<context context-type="sourcefile">node_modules/.pnpm/@ng-bootstrap+ng-bootstrap@20.0.0_@angular+common@21.2.0_@angular+core@21.2.0_@angular+_fdecb2f5429dfeda6301fd300107de5b/node_modules/src/timepicker/timepicker-config.ts</context>
|
<context context-type="sourcefile">node_modules/.pnpm/@ng-bootstrap+ng-bootstrap@20.0.0_@angular+common@21.1.3_@angular+core@21.1.3_@angular+_1ede04b1f6b65fa8e34a28e44afe1de9/node_modules/src/timepicker/timepicker-config.ts</context>
|
||||||
<context context-type="linenumber">21</context>
|
<context context-type="linenumber">21</context>
|
||||||
</context-group>
|
</context-group>
|
||||||
</trans-unit>
|
</trans-unit>
|
||||||
<trans-unit id="ngb.timepicker.decrement-hours" datatype="html">
|
<trans-unit id="ngb.timepicker.decrement-hours" datatype="html">
|
||||||
<source>Decrement hours</source>
|
<source>Decrement hours</source>
|
||||||
<context-group purpose="location">
|
<context-group purpose="location">
|
||||||
<context context-type="sourcefile">node_modules/.pnpm/@ng-bootstrap+ng-bootstrap@20.0.0_@angular+common@21.2.0_@angular+core@21.2.0_@angular+_fdecb2f5429dfeda6301fd300107de5b/node_modules/src/timepicker/timepicker-config.ts</context>
|
<context context-type="sourcefile">node_modules/.pnpm/@ng-bootstrap+ng-bootstrap@20.0.0_@angular+common@21.1.3_@angular+core@21.1.3_@angular+_1ede04b1f6b65fa8e34a28e44afe1de9/node_modules/src/timepicker/timepicker-config.ts</context>
|
||||||
<context context-type="linenumber">21</context>
|
<context context-type="linenumber">21</context>
|
||||||
</context-group>
|
</context-group>
|
||||||
</trans-unit>
|
</trans-unit>
|
||||||
<trans-unit id="ngb.timepicker.increment-minutes" datatype="html">
|
<trans-unit id="ngb.timepicker.increment-minutes" datatype="html">
|
||||||
<source>Increment minutes</source>
|
<source>Increment minutes</source>
|
||||||
<context-group purpose="location">
|
<context-group purpose="location">
|
||||||
<context context-type="sourcefile">node_modules/.pnpm/@ng-bootstrap+ng-bootstrap@20.0.0_@angular+common@21.2.0_@angular+core@21.2.0_@angular+_fdecb2f5429dfeda6301fd300107de5b/node_modules/src/timepicker/timepicker-config.ts</context>
|
<context context-type="sourcefile">node_modules/.pnpm/@ng-bootstrap+ng-bootstrap@20.0.0_@angular+common@21.1.3_@angular+core@21.1.3_@angular+_1ede04b1f6b65fa8e34a28e44afe1de9/node_modules/src/timepicker/timepicker-config.ts</context>
|
||||||
<context context-type="linenumber">21</context>
|
<context context-type="linenumber">21</context>
|
||||||
</context-group>
|
</context-group>
|
||||||
</trans-unit>
|
</trans-unit>
|
||||||
<trans-unit id="ngb.timepicker.decrement-minutes" datatype="html">
|
<trans-unit id="ngb.timepicker.decrement-minutes" datatype="html">
|
||||||
<source>Decrement minutes</source>
|
<source>Decrement minutes</source>
|
||||||
<context-group purpose="location">
|
<context-group purpose="location">
|
||||||
<context context-type="sourcefile">node_modules/.pnpm/@ng-bootstrap+ng-bootstrap@20.0.0_@angular+common@21.2.0_@angular+core@21.2.0_@angular+_fdecb2f5429dfeda6301fd300107de5b/node_modules/src/timepicker/timepicker-config.ts</context>
|
<context context-type="sourcefile">node_modules/.pnpm/@ng-bootstrap+ng-bootstrap@20.0.0_@angular+common@21.1.3_@angular+core@21.1.3_@angular+_1ede04b1f6b65fa8e34a28e44afe1de9/node_modules/src/timepicker/timepicker-config.ts</context>
|
||||||
<context context-type="linenumber">21</context>
|
<context context-type="linenumber">21</context>
|
||||||
</context-group>
|
</context-group>
|
||||||
</trans-unit>
|
</trans-unit>
|
||||||
<trans-unit id="ngb.timepicker.SS" datatype="html">
|
<trans-unit id="ngb.timepicker.SS" datatype="html">
|
||||||
<source>SS</source>
|
<source>SS</source>
|
||||||
<context-group purpose="location">
|
<context-group purpose="location">
|
||||||
<context context-type="sourcefile">node_modules/.pnpm/@ng-bootstrap+ng-bootstrap@20.0.0_@angular+common@21.2.0_@angular+core@21.2.0_@angular+_fdecb2f5429dfeda6301fd300107de5b/node_modules/src/timepicker/timepicker-config.ts</context>
|
<context context-type="sourcefile">node_modules/.pnpm/@ng-bootstrap+ng-bootstrap@20.0.0_@angular+common@21.1.3_@angular+core@21.1.3_@angular+_1ede04b1f6b65fa8e34a28e44afe1de9/node_modules/src/timepicker/timepicker-config.ts</context>
|
||||||
<context context-type="linenumber">21</context>
|
<context context-type="linenumber">21</context>
|
||||||
</context-group>
|
</context-group>
|
||||||
</trans-unit>
|
</trans-unit>
|
||||||
<trans-unit id="ngb.timepicker.seconds" datatype="html">
|
<trans-unit id="ngb.timepicker.seconds" datatype="html">
|
||||||
<source>Seconds</source>
|
<source>Seconds</source>
|
||||||
<context-group purpose="location">
|
<context-group purpose="location">
|
||||||
<context context-type="sourcefile">node_modules/.pnpm/@ng-bootstrap+ng-bootstrap@20.0.0_@angular+common@21.2.0_@angular+core@21.2.0_@angular+_fdecb2f5429dfeda6301fd300107de5b/node_modules/src/timepicker/timepicker-config.ts</context>
|
<context context-type="sourcefile">node_modules/.pnpm/@ng-bootstrap+ng-bootstrap@20.0.0_@angular+common@21.1.3_@angular+core@21.1.3_@angular+_1ede04b1f6b65fa8e34a28e44afe1de9/node_modules/src/timepicker/timepicker-config.ts</context>
|
||||||
<context context-type="linenumber">21</context>
|
<context context-type="linenumber">21</context>
|
||||||
</context-group>
|
</context-group>
|
||||||
</trans-unit>
|
</trans-unit>
|
||||||
<trans-unit id="ngb.timepicker.increment-seconds" datatype="html">
|
<trans-unit id="ngb.timepicker.increment-seconds" datatype="html">
|
||||||
<source>Increment seconds</source>
|
<source>Increment seconds</source>
|
||||||
<context-group purpose="location">
|
<context-group purpose="location">
|
||||||
<context context-type="sourcefile">node_modules/.pnpm/@ng-bootstrap+ng-bootstrap@20.0.0_@angular+common@21.2.0_@angular+core@21.2.0_@angular+_fdecb2f5429dfeda6301fd300107de5b/node_modules/src/timepicker/timepicker-config.ts</context>
|
<context context-type="sourcefile">node_modules/.pnpm/@ng-bootstrap+ng-bootstrap@20.0.0_@angular+common@21.1.3_@angular+core@21.1.3_@angular+_1ede04b1f6b65fa8e34a28e44afe1de9/node_modules/src/timepicker/timepicker-config.ts</context>
|
||||||
<context context-type="linenumber">21</context>
|
<context context-type="linenumber">21</context>
|
||||||
</context-group>
|
</context-group>
|
||||||
</trans-unit>
|
</trans-unit>
|
||||||
<trans-unit id="ngb.timepicker.decrement-seconds" datatype="html">
|
<trans-unit id="ngb.timepicker.decrement-seconds" datatype="html">
|
||||||
<source>Decrement seconds</source>
|
<source>Decrement seconds</source>
|
||||||
<context-group purpose="location">
|
<context-group purpose="location">
|
||||||
<context context-type="sourcefile">node_modules/.pnpm/@ng-bootstrap+ng-bootstrap@20.0.0_@angular+common@21.2.0_@angular+core@21.2.0_@angular+_fdecb2f5429dfeda6301fd300107de5b/node_modules/src/timepicker/timepicker-config.ts</context>
|
<context context-type="sourcefile">node_modules/.pnpm/@ng-bootstrap+ng-bootstrap@20.0.0_@angular+common@21.1.3_@angular+core@21.1.3_@angular+_1ede04b1f6b65fa8e34a28e44afe1de9/node_modules/src/timepicker/timepicker-config.ts</context>
|
||||||
<context context-type="linenumber">21</context>
|
<context context-type="linenumber">21</context>
|
||||||
</context-group>
|
</context-group>
|
||||||
</trans-unit>
|
</trans-unit>
|
||||||
<trans-unit id="ngb.timepicker.PM" datatype="html">
|
<trans-unit id="ngb.timepicker.PM" datatype="html">
|
||||||
<source><x id="INTERPOLATION"/></source>
|
<source><x id="INTERPOLATION"/></source>
|
||||||
<context-group purpose="location">
|
<context-group purpose="location">
|
||||||
<context context-type="sourcefile">node_modules/.pnpm/@ng-bootstrap+ng-bootstrap@20.0.0_@angular+common@21.2.0_@angular+core@21.2.0_@angular+_fdecb2f5429dfeda6301fd300107de5b/node_modules/src/timepicker/timepicker-config.ts</context>
|
<context context-type="sourcefile">node_modules/.pnpm/@ng-bootstrap+ng-bootstrap@20.0.0_@angular+common@21.1.3_@angular+core@21.1.3_@angular+_1ede04b1f6b65fa8e34a28e44afe1de9/node_modules/src/timepicker/timepicker-config.ts</context>
|
||||||
<context context-type="linenumber">21</context>
|
<context context-type="linenumber">21</context>
|
||||||
</context-group>
|
</context-group>
|
||||||
</trans-unit>
|
</trans-unit>
|
||||||
<trans-unit id="ngb.toast.close-aria" datatype="html">
|
<trans-unit id="ngb.toast.close-aria" datatype="html">
|
||||||
<source>Close</source>
|
<source>Close</source>
|
||||||
<context-group purpose="location">
|
<context-group purpose="location">
|
||||||
<context context-type="sourcefile">node_modules/.pnpm/@ng-bootstrap+ng-bootstrap@20.0.0_@angular+common@21.2.0_@angular+core@21.2.0_@angular+_fdecb2f5429dfeda6301fd300107de5b/node_modules/src/toast/toast-config.ts</context>
|
<context context-type="sourcefile">node_modules/.pnpm/@ng-bootstrap+ng-bootstrap@20.0.0_@angular+common@21.1.3_@angular+core@21.1.3_@angular+_1ede04b1f6b65fa8e34a28e44afe1de9/node_modules/src/toast/toast-config.ts</context>
|
||||||
<context context-type="linenumber">54</context>
|
<context context-type="linenumber">54</context>
|
||||||
</context-group>
|
</context-group>
|
||||||
</trans-unit>
|
</trans-unit>
|
||||||
@@ -503,29 +503,14 @@
|
|||||||
<source>Read the documentation about this setting</source>
|
<source>Read the documentation about this setting</source>
|
||||||
<context-group purpose="location">
|
<context-group purpose="location">
|
||||||
<context context-type="sourcefile">src/app/components/admin/config/config.component.html</context>
|
<context context-type="sourcefile">src/app/components/admin/config/config.component.html</context>
|
||||||
<context context-type="linenumber">26</context>
|
<context context-type="linenumber">25</context>
|
||||||
</context-group>
|
|
||||||
</trans-unit>
|
|
||||||
<trans-unit id="7808756054397155068" datatype="html">
|
|
||||||
<source>Reset</source>
|
|
||||||
<context-group purpose="location">
|
|
||||||
<context context-type="sourcefile">src/app/components/admin/config/config.component.html</context>
|
|
||||||
<context context-type="linenumber">30</context>
|
|
||||||
</context-group>
|
|
||||||
<context-group purpose="location">
|
|
||||||
<context context-type="sourcefile">src/app/components/admin/config/config.component.html</context>
|
|
||||||
<context context-type="linenumber">31</context>
|
|
||||||
</context-group>
|
|
||||||
<context-group purpose="location">
|
|
||||||
<context context-type="sourcefile">src/app/components/admin/settings/settings.component.html</context>
|
|
||||||
<context context-type="linenumber">136</context>
|
|
||||||
</context-group>
|
</context-group>
|
||||||
</trans-unit>
|
</trans-unit>
|
||||||
<trans-unit id="2180291763949669799" datatype="html">
|
<trans-unit id="2180291763949669799" datatype="html">
|
||||||
<source>Enable</source>
|
<source>Enable</source>
|
||||||
<context-group purpose="location">
|
<context-group purpose="location">
|
||||||
<context context-type="sourcefile">src/app/components/admin/config/config.component.html</context>
|
<context context-type="sourcefile">src/app/components/admin/config/config.component.html</context>
|
||||||
<context context-type="linenumber">39</context>
|
<context context-type="linenumber">34</context>
|
||||||
</context-group>
|
</context-group>
|
||||||
<context-group purpose="location">
|
<context-group purpose="location">
|
||||||
<context context-type="sourcefile">src/app/components/common/profile-edit-dialog/profile-edit-dialog.component.html</context>
|
<context context-type="sourcefile">src/app/components/common/profile-edit-dialog/profile-edit-dialog.component.html</context>
|
||||||
@@ -536,7 +521,7 @@
|
|||||||
<source>Discard</source>
|
<source>Discard</source>
|
||||||
<context-group purpose="location">
|
<context-group purpose="location">
|
||||||
<context context-type="sourcefile">src/app/components/admin/config/config.component.html</context>
|
<context context-type="sourcefile">src/app/components/admin/config/config.component.html</context>
|
||||||
<context context-type="linenumber">62</context>
|
<context context-type="linenumber">57</context>
|
||||||
</context-group>
|
</context-group>
|
||||||
<context-group purpose="location">
|
<context-group purpose="location">
|
||||||
<context context-type="sourcefile">src/app/components/document-detail/document-detail.component.html</context>
|
<context context-type="sourcefile">src/app/components/document-detail/document-detail.component.html</context>
|
||||||
@@ -547,7 +532,7 @@
|
|||||||
<source>Save</source>
|
<source>Save</source>
|
||||||
<context-group purpose="location">
|
<context-group purpose="location">
|
||||||
<context context-type="sourcefile">src/app/components/admin/config/config.component.html</context>
|
<context context-type="sourcefile">src/app/components/admin/config/config.component.html</context>
|
||||||
<context context-type="linenumber">65</context>
|
<context context-type="linenumber">60</context>
|
||||||
</context-group>
|
</context-group>
|
||||||
<context-group purpose="location">
|
<context-group purpose="location">
|
||||||
<context context-type="sourcefile">src/app/components/admin/settings/settings.component.html</context>
|
<context context-type="sourcefile">src/app/components/admin/settings/settings.component.html</context>
|
||||||
@@ -973,6 +958,13 @@
|
|||||||
<context context-type="linenumber">129</context>
|
<context context-type="linenumber">129</context>
|
||||||
</context-group>
|
</context-group>
|
||||||
</trans-unit>
|
</trans-unit>
|
||||||
|
<trans-unit id="7808756054397155068" datatype="html">
|
||||||
|
<source>Reset</source>
|
||||||
|
<context-group purpose="location">
|
||||||
|
<context context-type="sourcefile">src/app/components/admin/settings/settings.component.html</context>
|
||||||
|
<context context-type="linenumber">136</context>
|
||||||
|
</context-group>
|
||||||
|
</trans-unit>
|
||||||
<trans-unit id="6760166989231109310" datatype="html">
|
<trans-unit id="6760166989231109310" datatype="html">
|
||||||
<source>Global search</source>
|
<source>Global search</source>
|
||||||
<context-group purpose="location">
|
<context-group purpose="location">
|
||||||
|
|||||||
@@ -1,6 +1,6 @@
|
|||||||
{
|
{
|
||||||
"name": "paperless-ngx-ui",
|
"name": "paperless-ngx-ui",
|
||||||
"version": "2.20.9",
|
"version": "2.20.8",
|
||||||
"scripts": {
|
"scripts": {
|
||||||
"preinstall": "npx only-allow pnpm",
|
"preinstall": "npx only-allow pnpm",
|
||||||
"ng": "ng",
|
"ng": "ng",
|
||||||
@@ -11,17 +11,17 @@
|
|||||||
},
|
},
|
||||||
"private": true,
|
"private": true,
|
||||||
"dependencies": {
|
"dependencies": {
|
||||||
"@angular/cdk": "^21.2.0",
|
"@angular/cdk": "^21.1.3",
|
||||||
"@angular/common": "~21.2.0",
|
"@angular/common": "~21.1.3",
|
||||||
"@angular/compiler": "~21.2.0",
|
"@angular/compiler": "~21.1.3",
|
||||||
"@angular/core": "~21.2.0",
|
"@angular/core": "~21.1.3",
|
||||||
"@angular/forms": "~21.2.0",
|
"@angular/forms": "~21.1.3",
|
||||||
"@angular/localize": "~21.2.0",
|
"@angular/localize": "~21.1.3",
|
||||||
"@angular/platform-browser": "~21.2.0",
|
"@angular/platform-browser": "~21.1.3",
|
||||||
"@angular/platform-browser-dynamic": "~21.2.0",
|
"@angular/platform-browser-dynamic": "~21.1.3",
|
||||||
"@angular/router": "~21.2.0",
|
"@angular/router": "~21.1.3",
|
||||||
"@ng-bootstrap/ng-bootstrap": "^20.0.0",
|
"@ng-bootstrap/ng-bootstrap": "^20.0.0",
|
||||||
"@ng-select/ng-select": "^21.4.1",
|
"@ng-select/ng-select": "^21.2.0",
|
||||||
"@ngneat/dirty-check-forms": "^3.0.3",
|
"@ngneat/dirty-check-forms": "^3.0.3",
|
||||||
"@popperjs/core": "^2.11.8",
|
"@popperjs/core": "^2.11.8",
|
||||||
"bootstrap": "^5.3.8",
|
"bootstrap": "^5.3.8",
|
||||||
@@ -37,25 +37,25 @@
|
|||||||
"tslib": "^2.8.1",
|
"tslib": "^2.8.1",
|
||||||
"utif": "^3.1.0",
|
"utif": "^3.1.0",
|
||||||
"uuid": "^13.0.0",
|
"uuid": "^13.0.0",
|
||||||
"zone.js": "^0.16.1"
|
"zone.js": "^0.16.0"
|
||||||
},
|
},
|
||||||
"devDependencies": {
|
"devDependencies": {
|
||||||
"@angular-builders/custom-webpack": "^21.0.3",
|
"@angular-builders/custom-webpack": "^21.0.3",
|
||||||
"@angular-builders/jest": "^21.0.3",
|
"@angular-builders/jest": "^21.0.3",
|
||||||
"@angular-devkit/core": "^21.2.0",
|
"@angular-devkit/core": "^21.1.3",
|
||||||
"@angular-devkit/schematics": "^21.2.0",
|
"@angular-devkit/schematics": "^21.1.3",
|
||||||
"@angular-eslint/builder": "21.2.0",
|
"@angular-eslint/builder": "21.2.0",
|
||||||
"@angular-eslint/eslint-plugin": "21.2.0",
|
"@angular-eslint/eslint-plugin": "21.2.0",
|
||||||
"@angular-eslint/eslint-plugin-template": "21.2.0",
|
"@angular-eslint/eslint-plugin-template": "21.2.0",
|
||||||
"@angular-eslint/schematics": "21.2.0",
|
"@angular-eslint/schematics": "21.2.0",
|
||||||
"@angular-eslint/template-parser": "21.2.0",
|
"@angular-eslint/template-parser": "21.2.0",
|
||||||
"@angular/build": "^21.2.0",
|
"@angular/build": "^21.1.3",
|
||||||
"@angular/cli": "~21.2.0",
|
"@angular/cli": "~21.1.3",
|
||||||
"@angular/compiler-cli": "~21.2.0",
|
"@angular/compiler-cli": "~21.1.3",
|
||||||
"@codecov/webpack-plugin": "^1.9.1",
|
"@codecov/webpack-plugin": "^1.9.1",
|
||||||
"@playwright/test": "^1.58.2",
|
"@playwright/test": "^1.58.2",
|
||||||
"@types/jest": "^30.0.0",
|
"@types/jest": "^30.0.0",
|
||||||
"@types/node": "^25.3.3",
|
"@types/node": "^25.2.1",
|
||||||
"@typescript-eslint/eslint-plugin": "^8.54.0",
|
"@typescript-eslint/eslint-plugin": "^8.54.0",
|
||||||
"@typescript-eslint/parser": "^8.54.0",
|
"@typescript-eslint/parser": "^8.54.0",
|
||||||
"@typescript-eslint/utils": "^8.54.0",
|
"@typescript-eslint/utils": "^8.54.0",
|
||||||
@@ -63,12 +63,12 @@
|
|||||||
"jest": "30.2.0",
|
"jest": "30.2.0",
|
||||||
"jest-environment-jsdom": "^30.2.0",
|
"jest-environment-jsdom": "^30.2.0",
|
||||||
"jest-junit": "^16.0.0",
|
"jest-junit": "^16.0.0",
|
||||||
"jest-preset-angular": "^16.1.1",
|
"jest-preset-angular": "^16.0.0",
|
||||||
"jest-websocket-mock": "^2.5.0",
|
"jest-websocket-mock": "^2.5.0",
|
||||||
"prettier-plugin-organize-imports": "^4.3.0",
|
"prettier-plugin-organize-imports": "^4.3.0",
|
||||||
"ts-node": "~10.9.1",
|
"ts-node": "~10.9.1",
|
||||||
"typescript": "^5.9.3",
|
"typescript": "^5.9.3",
|
||||||
"webpack": "^5.105.3"
|
"webpack": "^5.105.0"
|
||||||
},
|
},
|
||||||
"packageManager": "pnpm@10.17.1",
|
"packageManager": "pnpm@10.17.1",
|
||||||
"pnpm": {
|
"pnpm": {
|
||||||
|
|||||||
2599
src-ui/pnpm-lock.yaml
generated
2599
src-ui/pnpm-lock.yaml
generated
File diff suppressed because it is too large
Load Diff
@@ -19,18 +19,13 @@
|
|||||||
<div class="col">
|
<div class="col">
|
||||||
<div class="card bg-light">
|
<div class="card bg-light">
|
||||||
<div class="card-body">
|
<div class="card-body">
|
||||||
<div class="card-title d-flex align-items-center">
|
<div class="card-title">
|
||||||
<h6 class="mb-0">
|
<h6>
|
||||||
{{option.title}}
|
{{option.title}}
|
||||||
|
<a class="btn btn-sm btn-link" title="Read the documentation about this setting" i18n-title [href]="getDocsUrl(option.config_key)" target="_blank" referrerpolicy="no-referrer">
|
||||||
|
<i-bs name="info-circle"></i-bs>
|
||||||
|
</a>
|
||||||
</h6>
|
</h6>
|
||||||
<a class="btn btn-sm btn-link" title="Read the documentation about this setting" i18n-title [href]="getDocsUrl(option.config_key)" target="_blank" referrerpolicy="no-referrer">
|
|
||||||
<i-bs name="info-circle"></i-bs>
|
|
||||||
</a>
|
|
||||||
@if (isSet(option.key)) {
|
|
||||||
<button type="button" class="btn btn-sm btn-link text-danger ms-auto pe-0" title="Reset" i18n-title (click)="resetOption(option.key)">
|
|
||||||
<i-bs class="me-1" name="x"></i-bs><ng-container i18n>Reset</ng-container>
|
|
||||||
</button>
|
|
||||||
}
|
|
||||||
</div>
|
</div>
|
||||||
<div class="mb-n3">
|
<div class="mb-n3">
|
||||||
@switch (option.type) {
|
@switch (option.type) {
|
||||||
|
|||||||
@@ -144,18 +144,4 @@ describe('ConfigComponent', () => {
|
|||||||
component.uploadFile(new File([], 'test.png'), 'app_logo')
|
component.uploadFile(new File([], 'test.png'), 'app_logo')
|
||||||
expect(initSpy).toHaveBeenCalled()
|
expect(initSpy).toHaveBeenCalled()
|
||||||
})
|
})
|
||||||
|
|
||||||
it('should reset option to null', () => {
|
|
||||||
component.configForm.patchValue({ output_type: OutputTypeConfig.PDF_A })
|
|
||||||
expect(component.isSet('output_type')).toBeTruthy()
|
|
||||||
component.resetOption('output_type')
|
|
||||||
expect(component.configForm.get('output_type').value).toBeNull()
|
|
||||||
expect(component.isSet('output_type')).toBeFalsy()
|
|
||||||
component.configForm.patchValue({ app_title: 'Test Title' })
|
|
||||||
component.resetOption('app_title')
|
|
||||||
expect(component.configForm.get('app_title').value).toBeNull()
|
|
||||||
component.configForm.patchValue({ barcodes_enabled: true })
|
|
||||||
component.resetOption('barcodes_enabled')
|
|
||||||
expect(component.configForm.get('barcodes_enabled').value).toBeNull()
|
|
||||||
})
|
|
||||||
})
|
})
|
||||||
|
|||||||
@@ -210,12 +210,4 @@ export class ConfigComponent
|
|||||||
},
|
},
|
||||||
})
|
})
|
||||||
}
|
}
|
||||||
|
|
||||||
public isSet(key: string): boolean {
|
|
||||||
return this.configForm.get(key).value != null
|
|
||||||
}
|
|
||||||
|
|
||||||
public resetOption(key: string) {
|
|
||||||
this.configForm.get(key).setValue(null)
|
|
||||||
}
|
|
||||||
}
|
}
|
||||||
|
|||||||
@@ -57,7 +57,7 @@
|
|||||||
}
|
}
|
||||||
</div>
|
</div>
|
||||||
@for (version of versions; track version.id) {
|
@for (version of versions; track version.id) {
|
||||||
<div class="dropdown-item border-top px-0" [class.pe-3]="versions.length === 1">
|
<div class="dropdown-item border-top px-0">
|
||||||
<div class="d-flex align-items-center w-100 py-2 version-item">
|
<div class="d-flex align-items-center w-100 py-2 version-item">
|
||||||
<div class="btn btn-link link-underline link-underline-opacity-0 d-flex align-items-center small text-start p-0 version-link"
|
<div class="btn btn-link link-underline link-underline-opacity-0 d-flex align-items-center small text-start p-0 version-link"
|
||||||
(click)="selectVersion(version.id)"
|
(click)="selectVersion(version.id)"
|
||||||
@@ -88,7 +88,7 @@
|
|||||||
@if (version.version_label) {
|
@if (version.version_label) {
|
||||||
{{ version.version_label }}
|
{{ version.version_label }}
|
||||||
} @else {
|
} @else {
|
||||||
<span class="fst-italic"><ng-container i18n>Version</ng-container> {{ versions.length - $index }} <span class="text-muted small">(#{{ version.id }})</span></span>
|
<span i18n>Version</span> #{{ version.id }}
|
||||||
}
|
}
|
||||||
</span>
|
</span>
|
||||||
}
|
}
|
||||||
|
|||||||
@@ -6,7 +6,7 @@ export const environment = {
|
|||||||
apiVersion: '9', // match src/paperless/settings.py
|
apiVersion: '9', // match src/paperless/settings.py
|
||||||
appTitle: 'Paperless-ngx',
|
appTitle: 'Paperless-ngx',
|
||||||
tag: 'prod',
|
tag: 'prod',
|
||||||
version: '2.20.9',
|
version: '2.20.8',
|
||||||
webSocketHost: window.location.host,
|
webSocketHost: window.location.host,
|
||||||
webSocketProtocol: window.location.protocol == 'https:' ? 'wss:' : 'ws:',
|
webSocketProtocol: window.location.protocol == 'https:' ? 'wss:' : 'ws:',
|
||||||
webSocketBaseUrl: base_url.pathname + 'ws/',
|
webSocketBaseUrl: base_url.pathname + 'ws/',
|
||||||
|
|||||||
@@ -1,5 +1,5 @@
|
|||||||
|
from datetime import UTC
|
||||||
from datetime import datetime
|
from datetime import datetime
|
||||||
from datetime import timezone
|
|
||||||
from typing import Any
|
from typing import Any
|
||||||
|
|
||||||
from django.conf import settings
|
from django.conf import settings
|
||||||
@@ -139,7 +139,7 @@ def thumbnail_last_modified(request: Any, pk: int) -> datetime | None:
|
|||||||
# No cache, get the timestamp and cache the datetime
|
# No cache, get the timestamp and cache the datetime
|
||||||
last_modified = datetime.fromtimestamp(
|
last_modified = datetime.fromtimestamp(
|
||||||
doc.thumbnail_path.stat().st_mtime,
|
doc.thumbnail_path.stat().st_mtime,
|
||||||
tz=timezone.utc,
|
tz=UTC,
|
||||||
)
|
)
|
||||||
cache.set(doc_key, last_modified, CACHE_50_MINUTES)
|
cache.set(doc_key, last_modified, CACHE_50_MINUTES)
|
||||||
return last_modified
|
return last_modified
|
||||||
|
|||||||
@@ -2,7 +2,7 @@ import datetime
|
|||||||
import hashlib
|
import hashlib
|
||||||
import os
|
import os
|
||||||
import tempfile
|
import tempfile
|
||||||
from enum import Enum
|
from enum import StrEnum
|
||||||
from pathlib import Path
|
from pathlib import Path
|
||||||
from typing import TYPE_CHECKING
|
from typing import TYPE_CHECKING
|
||||||
from typing import Final
|
from typing import Final
|
||||||
@@ -43,7 +43,6 @@ from documents.plugins.helpers import ProgressManager
|
|||||||
from documents.plugins.helpers import ProgressStatusOptions
|
from documents.plugins.helpers import ProgressStatusOptions
|
||||||
from documents.signals import document_consumption_finished
|
from documents.signals import document_consumption_finished
|
||||||
from documents.signals import document_consumption_started
|
from documents.signals import document_consumption_started
|
||||||
from documents.signals import document_updated
|
|
||||||
from documents.signals.handlers import run_workflows
|
from documents.signals.handlers import run_workflows
|
||||||
from documents.templating.workflows import parse_w_workflow_placeholders
|
from documents.templating.workflows import parse_w_workflow_placeholders
|
||||||
from documents.utils import copy_basic_file_stats
|
from documents.utils import copy_basic_file_stats
|
||||||
@@ -81,7 +80,7 @@ class ConsumerError(Exception):
|
|||||||
pass
|
pass
|
||||||
|
|
||||||
|
|
||||||
class ConsumerStatusShortMessage(str, Enum):
|
class ConsumerStatusShortMessage(StrEnum):
|
||||||
DOCUMENT_ALREADY_EXISTS = "document_already_exists"
|
DOCUMENT_ALREADY_EXISTS = "document_already_exists"
|
||||||
DOCUMENT_ALREADY_EXISTS_IN_TRASH = "document_already_exists_in_trash"
|
DOCUMENT_ALREADY_EXISTS_IN_TRASH = "document_already_exists_in_trash"
|
||||||
ASN_ALREADY_EXISTS = "asn_already_exists"
|
ASN_ALREADY_EXISTS = "asn_already_exists"
|
||||||
@@ -647,12 +646,6 @@ class ConsumerPlugin(
|
|||||||
# This triggers things like file renaming
|
# This triggers things like file renaming
|
||||||
document.save()
|
document.save()
|
||||||
|
|
||||||
if document.root_document_id:
|
|
||||||
document_updated.send(
|
|
||||||
sender=self.__class__,
|
|
||||||
document=document.root_document,
|
|
||||||
)
|
|
||||||
|
|
||||||
# Delete the file only if it was successfully consumed
|
# Delete the file only if it was successfully consumed
|
||||||
self.log.debug(f"Deleting original file {self.input_doc.original_file}")
|
self.log.debug(f"Deleting original file {self.input_doc.original_file}")
|
||||||
self.input_doc.original_file.unlink()
|
self.input_doc.original_file.unlink()
|
||||||
|
|||||||
@@ -5,10 +5,10 @@ import math
|
|||||||
import re
|
import re
|
||||||
from collections import Counter
|
from collections import Counter
|
||||||
from contextlib import contextmanager
|
from contextlib import contextmanager
|
||||||
|
from datetime import UTC
|
||||||
from datetime import datetime
|
from datetime import datetime
|
||||||
from datetime import time
|
from datetime import time
|
||||||
from datetime import timedelta
|
from datetime import timedelta
|
||||||
from datetime import timezone
|
|
||||||
from shutil import rmtree
|
from shutil import rmtree
|
||||||
from time import sleep
|
from time import sleep
|
||||||
from typing import TYPE_CHECKING
|
from typing import TYPE_CHECKING
|
||||||
@@ -437,7 +437,7 @@ class ManualResults:
|
|||||||
class LocalDateParser(English):
|
class LocalDateParser(English):
|
||||||
def reverse_timezone_offset(self, d):
|
def reverse_timezone_offset(self, d):
|
||||||
return (d.replace(tzinfo=django_timezone.get_current_timezone())).astimezone(
|
return (d.replace(tzinfo=django_timezone.get_current_timezone())).astimezone(
|
||||||
timezone.utc,
|
UTC,
|
||||||
)
|
)
|
||||||
|
|
||||||
def date_from(self, *args, **kwargs):
|
def date_from(self, *args, **kwargs):
|
||||||
@@ -641,8 +641,8 @@ def rewrite_natural_date_keywords(query_string: str) -> str:
|
|||||||
end = datetime(local_now.year - 1, 12, 31, 23, 59, 59, tzinfo=tz)
|
end = datetime(local_now.year - 1, 12, 31, 23, 59, 59, tzinfo=tz)
|
||||||
|
|
||||||
# Convert to UTC and format
|
# Convert to UTC and format
|
||||||
start_str = start.astimezone(timezone.utc).strftime("%Y%m%d%H%M%S")
|
start_str = start.astimezone(UTC).strftime("%Y%m%d%H%M%S")
|
||||||
end_str = end.astimezone(timezone.utc).strftime("%Y%m%d%H%M%S")
|
end_str = end.astimezone(UTC).strftime("%Y%m%d%H%M%S")
|
||||||
return f"{field}:[{start_str} TO {end_str}]"
|
return f"{field}:[{start_str} TO {end_str}]"
|
||||||
|
|
||||||
return re.sub(pattern, repl, query_string, flags=re.IGNORECASE)
|
return re.sub(pattern, repl, query_string, flags=re.IGNORECASE)
|
||||||
|
|||||||
@@ -1,320 +0,0 @@
|
|||||||
"""
|
|
||||||
Base command class for Paperless-ngx management commands.
|
|
||||||
|
|
||||||
Provides automatic progress bar and multiprocessing support with minimal boilerplate.
|
|
||||||
"""
|
|
||||||
|
|
||||||
from __future__ import annotations
|
|
||||||
|
|
||||||
import os
|
|
||||||
from collections.abc import Iterable
|
|
||||||
from collections.abc import Sized
|
|
||||||
from concurrent.futures import ProcessPoolExecutor
|
|
||||||
from concurrent.futures import as_completed
|
|
||||||
from dataclasses import dataclass
|
|
||||||
from typing import TYPE_CHECKING
|
|
||||||
from typing import Any
|
|
||||||
from typing import ClassVar
|
|
||||||
from typing import Generic
|
|
||||||
from typing import TypeVar
|
|
||||||
|
|
||||||
from django import db
|
|
||||||
from django.core.management import CommandError
|
|
||||||
from django.db.models import QuerySet
|
|
||||||
from django_rich.management import RichCommand
|
|
||||||
from rich.console import Console
|
|
||||||
from rich.progress import BarColumn
|
|
||||||
from rich.progress import MofNCompleteColumn
|
|
||||||
from rich.progress import Progress
|
|
||||||
from rich.progress import SpinnerColumn
|
|
||||||
from rich.progress import TextColumn
|
|
||||||
from rich.progress import TimeElapsedColumn
|
|
||||||
from rich.progress import TimeRemainingColumn
|
|
||||||
|
|
||||||
if TYPE_CHECKING:
|
|
||||||
from collections.abc import Callable
|
|
||||||
from collections.abc import Generator
|
|
||||||
from collections.abc import Iterable
|
|
||||||
from collections.abc import Sequence
|
|
||||||
|
|
||||||
from django.core.management import CommandParser
|
|
||||||
|
|
||||||
T = TypeVar("T")
|
|
||||||
R = TypeVar("R")
|
|
||||||
|
|
||||||
|
|
||||||
@dataclass(frozen=True, slots=True)
|
|
||||||
class ProcessResult(Generic[T, R]):
|
|
||||||
"""
|
|
||||||
Result of processing a single item in parallel.
|
|
||||||
|
|
||||||
Attributes:
|
|
||||||
item: The input item that was processed.
|
|
||||||
result: The return value from the processing function, or None if an error occurred.
|
|
||||||
error: The exception if processing failed, or None on success.
|
|
||||||
"""
|
|
||||||
|
|
||||||
item: T
|
|
||||||
result: R | None
|
|
||||||
error: BaseException | None
|
|
||||||
|
|
||||||
@property
|
|
||||||
def success(self) -> bool:
|
|
||||||
"""Return True if the item was processed successfully."""
|
|
||||||
return self.error is None
|
|
||||||
|
|
||||||
|
|
||||||
class PaperlessCommand(RichCommand):
|
|
||||||
"""
|
|
||||||
Base command class with automatic progress bar and multiprocessing support.
|
|
||||||
|
|
||||||
Features are opt-in via class attributes:
|
|
||||||
supports_progress_bar: Adds --no-progress-bar argument (default: True)
|
|
||||||
supports_multiprocessing: Adds --processes argument (default: False)
|
|
||||||
|
|
||||||
Example usage:
|
|
||||||
|
|
||||||
class Command(PaperlessCommand):
|
|
||||||
help = "Process all documents"
|
|
||||||
|
|
||||||
def handle(self, *args, **options):
|
|
||||||
documents = Document.objects.all()
|
|
||||||
for doc in self.track(documents, description="Processing..."):
|
|
||||||
process_document(doc)
|
|
||||||
|
|
||||||
class Command(PaperlessCommand):
|
|
||||||
help = "Regenerate thumbnails"
|
|
||||||
supports_multiprocessing = True
|
|
||||||
|
|
||||||
def handle(self, *args, **options):
|
|
||||||
ids = list(Document.objects.values_list("id", flat=True))
|
|
||||||
for result in self.process_parallel(process_doc, ids):
|
|
||||||
if result.error:
|
|
||||||
self.console.print(f"[red]Failed: {result.error}[/red]")
|
|
||||||
"""
|
|
||||||
|
|
||||||
supports_progress_bar: ClassVar[bool] = True
|
|
||||||
supports_multiprocessing: ClassVar[bool] = False
|
|
||||||
|
|
||||||
# Instance attributes set by execute() before handle() runs
|
|
||||||
no_progress_bar: bool
|
|
||||||
process_count: int
|
|
||||||
|
|
||||||
def add_arguments(self, parser: CommandParser) -> None:
|
|
||||||
"""Add arguments based on supported features."""
|
|
||||||
super().add_arguments(parser)
|
|
||||||
|
|
||||||
if self.supports_progress_bar:
|
|
||||||
parser.add_argument(
|
|
||||||
"--no-progress-bar",
|
|
||||||
default=False,
|
|
||||||
action="store_true",
|
|
||||||
help="Disable the progress bar",
|
|
||||||
)
|
|
||||||
|
|
||||||
if self.supports_multiprocessing:
|
|
||||||
default_processes = max(1, (os.cpu_count() or 1) // 4)
|
|
||||||
parser.add_argument(
|
|
||||||
"--processes",
|
|
||||||
default=default_processes,
|
|
||||||
type=int,
|
|
||||||
help=f"Number of processes to use (default: {default_processes})",
|
|
||||||
)
|
|
||||||
|
|
||||||
def execute(self, *args: Any, **options: Any) -> str | None:
|
|
||||||
"""
|
|
||||||
Set up instance state before handle() is called.
|
|
||||||
|
|
||||||
This is called by Django's command infrastructure after argument parsing
|
|
||||||
but before handle(). We use it to set instance attributes from options.
|
|
||||||
"""
|
|
||||||
# Set progress bar state
|
|
||||||
if self.supports_progress_bar:
|
|
||||||
self.no_progress_bar = options.get("no_progress_bar", False)
|
|
||||||
else:
|
|
||||||
self.no_progress_bar = True
|
|
||||||
|
|
||||||
# Set multiprocessing state
|
|
||||||
if self.supports_multiprocessing:
|
|
||||||
self.process_count = options.get("processes", 1)
|
|
||||||
if self.process_count < 1:
|
|
||||||
raise CommandError("--processes must be at least 1")
|
|
||||||
else:
|
|
||||||
self.process_count = 1
|
|
||||||
|
|
||||||
return super().execute(*args, **options)
|
|
||||||
|
|
||||||
def _create_progress(self, description: str) -> Progress:
|
|
||||||
"""
|
|
||||||
Create a configured Progress instance.
|
|
||||||
|
|
||||||
Progress output is directed to stderr to match the convention that
|
|
||||||
progress bars are transient UI feedback, not command output. This
|
|
||||||
mirrors tqdm's default behavior and prevents progress bar rendering
|
|
||||||
from interfering with stdout-based assertions in tests or piped
|
|
||||||
command output.
|
|
||||||
|
|
||||||
Args:
|
|
||||||
description: Text to display alongside the progress bar.
|
|
||||||
|
|
||||||
Returns:
|
|
||||||
A Progress instance configured with appropriate columns.
|
|
||||||
"""
|
|
||||||
return Progress(
|
|
||||||
SpinnerColumn(),
|
|
||||||
TextColumn("[progress.description]{task.description}"),
|
|
||||||
BarColumn(),
|
|
||||||
MofNCompleteColumn(),
|
|
||||||
TimeElapsedColumn(),
|
|
||||||
TimeRemainingColumn(),
|
|
||||||
console=Console(stderr=True),
|
|
||||||
transient=False,
|
|
||||||
)
|
|
||||||
|
|
||||||
def _get_iterable_length(self, iterable: Iterable[object]) -> int | None:
|
|
||||||
"""
|
|
||||||
Attempt to determine the length of an iterable without consuming it.
|
|
||||||
|
|
||||||
Tries .count() first (for Django querysets - executes SELECT COUNT(*)),
|
|
||||||
then falls back to len() for sequences.
|
|
||||||
|
|
||||||
Args:
|
|
||||||
iterable: The iterable to measure.
|
|
||||||
|
|
||||||
Returns:
|
|
||||||
The length if determinable, None otherwise.
|
|
||||||
"""
|
|
||||||
if isinstance(iterable, QuerySet):
|
|
||||||
return iterable.count()
|
|
||||||
|
|
||||||
if isinstance(iterable, Sized):
|
|
||||||
return len(iterable)
|
|
||||||
|
|
||||||
return None
|
|
||||||
|
|
||||||
def track(
|
|
||||||
self,
|
|
||||||
iterable: Iterable[T],
|
|
||||||
*,
|
|
||||||
description: str = "Processing...",
|
|
||||||
total: int | None = None,
|
|
||||||
) -> Generator[T, None, None]:
|
|
||||||
"""
|
|
||||||
Iterate over items with an optional progress bar.
|
|
||||||
|
|
||||||
Respects --no-progress-bar flag. When disabled, simply yields items
|
|
||||||
without any progress display.
|
|
||||||
|
|
||||||
Args:
|
|
||||||
iterable: The items to iterate over.
|
|
||||||
description: Text to display alongside the progress bar.
|
|
||||||
total: Total number of items. If None, attempts to determine
|
|
||||||
automatically via .count() (for querysets) or len().
|
|
||||||
|
|
||||||
Yields:
|
|
||||||
Items from the iterable.
|
|
||||||
|
|
||||||
Example:
|
|
||||||
for doc in self.track(documents, description="Renaming..."):
|
|
||||||
process(doc)
|
|
||||||
"""
|
|
||||||
if self.no_progress_bar:
|
|
||||||
yield from iterable
|
|
||||||
return
|
|
||||||
|
|
||||||
# Attempt to determine total if not provided
|
|
||||||
if total is None:
|
|
||||||
total = self._get_iterable_length(iterable)
|
|
||||||
|
|
||||||
with self._create_progress(description) as progress:
|
|
||||||
task_id = progress.add_task(description, total=total)
|
|
||||||
for item in iterable:
|
|
||||||
yield item
|
|
||||||
progress.advance(task_id)
|
|
||||||
|
|
||||||
def process_parallel(
|
|
||||||
self,
|
|
||||||
fn: Callable[[T], R],
|
|
||||||
items: Sequence[T],
|
|
||||||
*,
|
|
||||||
description: str = "Processing...",
|
|
||||||
) -> Generator[ProcessResult[T, R], None, None]:
|
|
||||||
"""
|
|
||||||
Process items in parallel with progress tracking.
|
|
||||||
|
|
||||||
When --processes=1, runs sequentially in the main process without
|
|
||||||
spawning subprocesses. This is critical for testing, as multiprocessing
|
|
||||||
breaks fixtures, mocks, and database transactions.
|
|
||||||
|
|
||||||
When --processes > 1, uses ProcessPoolExecutor and automatically closes
|
|
||||||
database connections before spawning workers (required for PostgreSQL).
|
|
||||||
|
|
||||||
Args:
|
|
||||||
fn: Function to apply to each item. Must be picklable for parallel
|
|
||||||
execution (i.e., defined at module level, not a lambda or closure).
|
|
||||||
items: Sequence of items to process.
|
|
||||||
description: Text to display alongside the progress bar.
|
|
||||||
|
|
||||||
Yields:
|
|
||||||
ProcessResult for each item, containing the item, result, and any error.
|
|
||||||
|
|
||||||
Example:
|
|
||||||
def regenerate_thumbnail(doc_id: int) -> Path:
|
|
||||||
...
|
|
||||||
|
|
||||||
for result in self.process_parallel(regenerate_thumbnail, doc_ids):
|
|
||||||
if result.error:
|
|
||||||
self.console.print(f"[red]Failed {result.item}[/red]")
|
|
||||||
"""
|
|
||||||
total = len(items)
|
|
||||||
|
|
||||||
if self.process_count == 1:
|
|
||||||
# Sequential execution in main process - critical for testing
|
|
||||||
yield from self._process_sequential(fn, items, description, total)
|
|
||||||
else:
|
|
||||||
# Parallel execution with ProcessPoolExecutor
|
|
||||||
yield from self._process_parallel(fn, items, description, total)
|
|
||||||
|
|
||||||
def _process_sequential(
|
|
||||||
self,
|
|
||||||
fn: Callable[[T], R],
|
|
||||||
items: Sequence[T],
|
|
||||||
description: str,
|
|
||||||
total: int,
|
|
||||||
) -> Generator[ProcessResult[T, R], None, None]:
|
|
||||||
"""Process items sequentially in the main process."""
|
|
||||||
for item in self.track(items, description=description, total=total):
|
|
||||||
try:
|
|
||||||
result = fn(item)
|
|
||||||
yield ProcessResult(item=item, result=result, error=None)
|
|
||||||
except Exception as e:
|
|
||||||
yield ProcessResult(item=item, result=None, error=e)
|
|
||||||
|
|
||||||
def _process_parallel(
|
|
||||||
self,
|
|
||||||
fn: Callable[[T], R],
|
|
||||||
items: Sequence[T],
|
|
||||||
description: str,
|
|
||||||
total: int,
|
|
||||||
) -> Generator[ProcessResult[T, R], None, None]:
|
|
||||||
"""Process items in parallel using ProcessPoolExecutor."""
|
|
||||||
# Close database connections before forking - required for PostgreSQL
|
|
||||||
db.connections.close_all()
|
|
||||||
|
|
||||||
with self._create_progress(description) as progress:
|
|
||||||
task_id = progress.add_task(description, total=total)
|
|
||||||
|
|
||||||
with ProcessPoolExecutor(max_workers=self.process_count) as executor:
|
|
||||||
# Submit all tasks and map futures back to items
|
|
||||||
future_to_item = {executor.submit(fn, item): item for item in items}
|
|
||||||
|
|
||||||
# Yield results as they complete
|
|
||||||
for future in as_completed(future_to_item):
|
|
||||||
item = future_to_item[future]
|
|
||||||
try:
|
|
||||||
result = future.result()
|
|
||||||
yield ProcessResult(item=item, result=result, error=None)
|
|
||||||
except Exception as e:
|
|
||||||
yield ProcessResult(item=item, result=None, error=e)
|
|
||||||
finally:
|
|
||||||
progress.advance(task_id)
|
|
||||||
@@ -1,15 +1,20 @@
|
|||||||
import logging
|
import logging
|
||||||
|
import multiprocessing
|
||||||
|
|
||||||
|
import tqdm
|
||||||
|
from django import db
|
||||||
from django.conf import settings
|
from django.conf import settings
|
||||||
|
from django.core.management.base import BaseCommand
|
||||||
|
|
||||||
from documents.management.commands.base import PaperlessCommand
|
from documents.management.commands.mixins import MultiProcessMixin
|
||||||
|
from documents.management.commands.mixins import ProgressBarMixin
|
||||||
from documents.models import Document
|
from documents.models import Document
|
||||||
from documents.tasks import update_document_content_maybe_archive_file
|
from documents.tasks import update_document_content_maybe_archive_file
|
||||||
|
|
||||||
logger = logging.getLogger("paperless.management.archiver")
|
logger = logging.getLogger("paperless.management.archiver")
|
||||||
|
|
||||||
|
|
||||||
class Command(PaperlessCommand):
|
class Command(MultiProcessMixin, ProgressBarMixin, BaseCommand):
|
||||||
help = (
|
help = (
|
||||||
"Using the current classification model, assigns correspondents, tags "
|
"Using the current classification model, assigns correspondents, tags "
|
||||||
"and document types to all documents, effectively allowing you to "
|
"and document types to all documents, effectively allowing you to "
|
||||||
@@ -17,10 +22,7 @@ class Command(PaperlessCommand):
|
|||||||
"modified) after their initial import."
|
"modified) after their initial import."
|
||||||
)
|
)
|
||||||
|
|
||||||
supports_multiprocessing = True
|
|
||||||
|
|
||||||
def add_arguments(self, parser):
|
def add_arguments(self, parser):
|
||||||
super().add_arguments(parser)
|
|
||||||
parser.add_argument(
|
parser.add_argument(
|
||||||
"-f",
|
"-f",
|
||||||
"--overwrite",
|
"--overwrite",
|
||||||
@@ -42,8 +44,13 @@ class Command(PaperlessCommand):
|
|||||||
"run on this specific document."
|
"run on this specific document."
|
||||||
),
|
),
|
||||||
)
|
)
|
||||||
|
self.add_argument_progress_bar_mixin(parser)
|
||||||
|
self.add_argument_processes_mixin(parser)
|
||||||
|
|
||||||
def handle(self, *args, **options):
|
def handle(self, *args, **options):
|
||||||
|
self.handle_processes_mixin(**options)
|
||||||
|
self.handle_progress_bar_mixin(**options)
|
||||||
|
|
||||||
settings.SCRATCH_DIR.mkdir(parents=True, exist_ok=True)
|
settings.SCRATCH_DIR.mkdir(parents=True, exist_ok=True)
|
||||||
|
|
||||||
overwrite = options["overwrite"]
|
overwrite = options["overwrite"]
|
||||||
@@ -53,21 +60,35 @@ class Command(PaperlessCommand):
|
|||||||
else:
|
else:
|
||||||
documents = Document.objects.all()
|
documents = Document.objects.all()
|
||||||
|
|
||||||
document_ids = [
|
document_ids = list(
|
||||||
doc.id for doc in documents if overwrite or not doc.has_archive_version
|
map(
|
||||||
]
|
lambda doc: doc.id,
|
||||||
|
filter(lambda d: overwrite or not d.has_archive_version, documents),
|
||||||
|
),
|
||||||
|
)
|
||||||
|
|
||||||
|
# Note to future self: this prevents django from reusing database
|
||||||
|
# connections between processes, which is bad and does not work
|
||||||
|
# with postgres.
|
||||||
|
db.connections.close_all()
|
||||||
|
|
||||||
try:
|
try:
|
||||||
logging.getLogger().handlers[0].level = logging.ERROR
|
logging.getLogger().handlers[0].level = logging.ERROR
|
||||||
|
|
||||||
for result in self.process_parallel(
|
if self.process_count == 1:
|
||||||
update_document_content_maybe_archive_file,
|
for doc_id in document_ids:
|
||||||
document_ids,
|
update_document_content_maybe_archive_file(doc_id)
|
||||||
description="Archiving...",
|
else: # pragma: no cover
|
||||||
):
|
with multiprocessing.Pool(self.process_count) as pool:
|
||||||
if result.error:
|
list(
|
||||||
self.console.print(
|
tqdm.tqdm(
|
||||||
f"[red]Failed document {result.item}: {result.error}[/red]",
|
pool.imap_unordered(
|
||||||
|
update_document_content_maybe_archive_file,
|
||||||
|
document_ids,
|
||||||
|
),
|
||||||
|
total=len(document_ids),
|
||||||
|
disable=self.no_progress_bar,
|
||||||
|
),
|
||||||
)
|
)
|
||||||
except KeyboardInterrupt: # pragma: no cover
|
except KeyboardInterrupt:
|
||||||
self.console.print("[yellow]Aborting...[/yellow]")
|
self.stdout.write(self.style.NOTICE("Aborting..."))
|
||||||
|
|||||||
@@ -1,20 +1,24 @@
|
|||||||
import dataclasses
|
import dataclasses
|
||||||
|
import multiprocessing
|
||||||
from typing import Final
|
from typing import Final
|
||||||
|
|
||||||
import rapidfuzz
|
import rapidfuzz
|
||||||
|
import tqdm
|
||||||
|
from django.core.management import BaseCommand
|
||||||
from django.core.management import CommandError
|
from django.core.management import CommandError
|
||||||
|
|
||||||
from documents.management.commands.base import PaperlessCommand
|
from documents.management.commands.mixins import MultiProcessMixin
|
||||||
|
from documents.management.commands.mixins import ProgressBarMixin
|
||||||
from documents.models import Document
|
from documents.models import Document
|
||||||
|
|
||||||
|
|
||||||
@dataclasses.dataclass(frozen=True, slots=True)
|
@dataclasses.dataclass(frozen=True)
|
||||||
class _WorkPackage:
|
class _WorkPackage:
|
||||||
first_doc: Document
|
first_doc: Document
|
||||||
second_doc: Document
|
second_doc: Document
|
||||||
|
|
||||||
|
|
||||||
@dataclasses.dataclass(frozen=True, slots=True)
|
@dataclasses.dataclass(frozen=True)
|
||||||
class _WorkResult:
|
class _WorkResult:
|
||||||
doc_one_pk: int
|
doc_one_pk: int
|
||||||
doc_two_pk: int
|
doc_two_pk: int
|
||||||
@@ -27,23 +31,22 @@ class _WorkResult:
|
|||||||
def _process_and_match(work: _WorkPackage) -> _WorkResult:
|
def _process_and_match(work: _WorkPackage) -> _WorkResult:
|
||||||
"""
|
"""
|
||||||
Does basic processing of document content, gets the basic ratio
|
Does basic processing of document content, gets the basic ratio
|
||||||
and returns the result package.
|
and returns the result package
|
||||||
"""
|
"""
|
||||||
|
# Normalize the string some, lower case, whitespace, etc
|
||||||
first_string = rapidfuzz.utils.default_process(work.first_doc.content)
|
first_string = rapidfuzz.utils.default_process(work.first_doc.content)
|
||||||
second_string = rapidfuzz.utils.default_process(work.second_doc.content)
|
second_string = rapidfuzz.utils.default_process(work.second_doc.content)
|
||||||
|
|
||||||
|
# Basic matching ratio
|
||||||
match = rapidfuzz.fuzz.ratio(first_string, second_string)
|
match = rapidfuzz.fuzz.ratio(first_string, second_string)
|
||||||
|
|
||||||
return _WorkResult(work.first_doc.pk, work.second_doc.pk, match)
|
return _WorkResult(work.first_doc.pk, work.second_doc.pk, match)
|
||||||
|
|
||||||
|
|
||||||
class Command(PaperlessCommand):
|
class Command(MultiProcessMixin, ProgressBarMixin, BaseCommand):
|
||||||
help = "Searches for documents where the content almost matches"
|
help = "Searches for documents where the content almost matches"
|
||||||
|
|
||||||
supports_multiprocessing = True
|
|
||||||
|
|
||||||
def add_arguments(self, parser):
|
def add_arguments(self, parser):
|
||||||
super().add_arguments(parser)
|
|
||||||
parser.add_argument(
|
parser.add_argument(
|
||||||
"--ratio",
|
"--ratio",
|
||||||
default=85.0,
|
default=85.0,
|
||||||
@@ -56,11 +59,16 @@ class Command(PaperlessCommand):
|
|||||||
action="store_true",
|
action="store_true",
|
||||||
help="If set, one document of matches above the ratio WILL BE DELETED",
|
help="If set, one document of matches above the ratio WILL BE DELETED",
|
||||||
)
|
)
|
||||||
|
self.add_argument_progress_bar_mixin(parser)
|
||||||
|
self.add_argument_processes_mixin(parser)
|
||||||
|
|
||||||
def handle(self, *args, **options):
|
def handle(self, *args, **options):
|
||||||
RATIO_MIN: Final[float] = 0.0
|
RATIO_MIN: Final[float] = 0.0
|
||||||
RATIO_MAX: Final[float] = 100.0
|
RATIO_MAX: Final[float] = 100.0
|
||||||
|
|
||||||
|
self.handle_processes_mixin(**options)
|
||||||
|
self.handle_progress_bar_mixin(**options)
|
||||||
|
|
||||||
if options["delete"]:
|
if options["delete"]:
|
||||||
self.stdout.write(
|
self.stdout.write(
|
||||||
self.style.WARNING(
|
self.style.WARNING(
|
||||||
@@ -72,58 +80,66 @@ class Command(PaperlessCommand):
|
|||||||
checked_pairs: set[tuple[int, int]] = set()
|
checked_pairs: set[tuple[int, int]] = set()
|
||||||
work_pkgs: list[_WorkPackage] = []
|
work_pkgs: list[_WorkPackage] = []
|
||||||
|
|
||||||
|
# Ratio is a float from 0.0 to 100.0
|
||||||
if opt_ratio < RATIO_MIN or opt_ratio > RATIO_MAX:
|
if opt_ratio < RATIO_MIN or opt_ratio > RATIO_MAX:
|
||||||
raise CommandError("The ratio must be between 0 and 100")
|
raise CommandError("The ratio must be between 0 and 100")
|
||||||
|
|
||||||
all_docs = Document.objects.all().order_by("id")
|
all_docs = Document.objects.all().order_by("id")
|
||||||
|
|
||||||
|
# Build work packages for processing
|
||||||
for first_doc in all_docs:
|
for first_doc in all_docs:
|
||||||
for second_doc in all_docs:
|
for second_doc in all_docs:
|
||||||
|
# doc to doc is obviously not useful
|
||||||
if first_doc.pk == second_doc.pk:
|
if first_doc.pk == second_doc.pk:
|
||||||
continue
|
continue
|
||||||
|
# Skip empty documents (e.g. password-protected)
|
||||||
if first_doc.content.strip() == "" or second_doc.content.strip() == "":
|
if first_doc.content.strip() == "" or second_doc.content.strip() == "":
|
||||||
continue
|
continue
|
||||||
|
# Skip matching which have already been matched together
|
||||||
|
# doc 1 to doc 2 is the same as doc 2 to doc 1
|
||||||
doc_1_to_doc_2 = (first_doc.pk, second_doc.pk)
|
doc_1_to_doc_2 = (first_doc.pk, second_doc.pk)
|
||||||
doc_2_to_doc_1 = doc_1_to_doc_2[::-1]
|
doc_2_to_doc_1 = doc_1_to_doc_2[::-1]
|
||||||
if doc_1_to_doc_2 in checked_pairs or doc_2_to_doc_1 in checked_pairs:
|
if doc_1_to_doc_2 in checked_pairs or doc_2_to_doc_1 in checked_pairs:
|
||||||
continue
|
continue
|
||||||
checked_pairs.update([doc_1_to_doc_2, doc_2_to_doc_1])
|
checked_pairs.update([doc_1_to_doc_2, doc_2_to_doc_1])
|
||||||
|
# Actually something useful to work on now
|
||||||
work_pkgs.append(_WorkPackage(first_doc, second_doc))
|
work_pkgs.append(_WorkPackage(first_doc, second_doc))
|
||||||
|
|
||||||
results: list[_WorkResult] = []
|
# Don't spin up a pool of 1 process
|
||||||
if self.process_count == 1:
|
if self.process_count == 1:
|
||||||
for work in self.track(work_pkgs, description="Matching..."):
|
results = []
|
||||||
|
for work in tqdm.tqdm(work_pkgs, disable=self.no_progress_bar):
|
||||||
results.append(_process_and_match(work))
|
results.append(_process_and_match(work))
|
||||||
else: # pragma: no cover
|
else: # pragma: no cover
|
||||||
for proc_result in self.process_parallel(
|
with multiprocessing.Pool(processes=self.process_count) as pool:
|
||||||
_process_and_match,
|
results = list(
|
||||||
work_pkgs,
|
tqdm.tqdm(
|
||||||
description="Matching...",
|
pool.imap_unordered(_process_and_match, work_pkgs),
|
||||||
):
|
total=len(work_pkgs),
|
||||||
if proc_result.error:
|
disable=self.no_progress_bar,
|
||||||
self.console.print(
|
|
||||||
f"[red]Failed: {proc_result.error}[/red]",
|
|
||||||
)
|
|
||||||
elif proc_result.result is not None:
|
|
||||||
results.append(proc_result.result)
|
|
||||||
|
|
||||||
messages: list[str] = []
|
|
||||||
maybe_delete_ids: list[int] = []
|
|
||||||
for match_result in sorted(results):
|
|
||||||
if match_result.ratio >= opt_ratio:
|
|
||||||
messages.append(
|
|
||||||
self.style.NOTICE(
|
|
||||||
f"Document {match_result.doc_one_pk} fuzzy match"
|
|
||||||
f" to {match_result.doc_two_pk}"
|
|
||||||
f" (confidence {match_result.ratio:.3f})\n",
|
|
||||||
),
|
),
|
||||||
)
|
)
|
||||||
maybe_delete_ids.append(match_result.doc_two_pk)
|
|
||||||
|
# Check results
|
||||||
|
messages = []
|
||||||
|
maybe_delete_ids = []
|
||||||
|
for result in sorted(results):
|
||||||
|
if result.ratio >= opt_ratio:
|
||||||
|
messages.append(
|
||||||
|
self.style.NOTICE(
|
||||||
|
f"Document {result.doc_one_pk} fuzzy match"
|
||||||
|
f" to {result.doc_two_pk} (confidence {result.ratio:.3f})\n",
|
||||||
|
),
|
||||||
|
)
|
||||||
|
maybe_delete_ids.append(result.doc_two_pk)
|
||||||
|
|
||||||
if len(messages) == 0:
|
if len(messages) == 0:
|
||||||
messages.append(self.style.SUCCESS("No matches found\n"))
|
messages.append(
|
||||||
self.stdout.writelines(messages)
|
self.style.SUCCESS("No matches found\n"),
|
||||||
|
)
|
||||||
|
self.stdout.writelines(
|
||||||
|
messages,
|
||||||
|
)
|
||||||
if options["delete"]:
|
if options["delete"]:
|
||||||
self.stdout.write(
|
self.stdout.write(
|
||||||
self.style.NOTICE(
|
self.style.NOTICE(
|
||||||
|
|||||||
@@ -1,12 +1,25 @@
|
|||||||
|
import logging
|
||||||
|
|
||||||
|
import tqdm
|
||||||
|
from django.core.management.base import BaseCommand
|
||||||
from django.db.models.signals import post_save
|
from django.db.models.signals import post_save
|
||||||
|
|
||||||
from documents.management.commands.base import PaperlessCommand
|
from documents.management.commands.mixins import ProgressBarMixin
|
||||||
from documents.models import Document
|
from documents.models import Document
|
||||||
|
|
||||||
|
|
||||||
class Command(PaperlessCommand):
|
class Command(ProgressBarMixin, BaseCommand):
|
||||||
help = "Rename all documents"
|
help = "This will rename all documents to match the latest filename format."
|
||||||
|
|
||||||
|
def add_arguments(self, parser):
|
||||||
|
self.add_argument_progress_bar_mixin(parser)
|
||||||
|
|
||||||
def handle(self, *args, **options):
|
def handle(self, *args, **options):
|
||||||
for document in self.track(Document.objects.all(), description="Renaming..."):
|
self.handle_progress_bar_mixin(**options)
|
||||||
|
logging.getLogger().handlers[0].level = logging.ERROR
|
||||||
|
|
||||||
|
for document in tqdm.tqdm(
|
||||||
|
Document.objects.all(),
|
||||||
|
disable=self.no_progress_bar,
|
||||||
|
):
|
||||||
post_save.send(Document, instance=document, created=False)
|
post_save.send(Document, instance=document, created=False)
|
||||||
|
|||||||
@@ -1,7 +1,10 @@
|
|||||||
import logging
|
import logging
|
||||||
|
|
||||||
|
import tqdm
|
||||||
|
from django.core.management.base import BaseCommand
|
||||||
|
|
||||||
from documents.classifier import load_classifier
|
from documents.classifier import load_classifier
|
||||||
from documents.management.commands.base import PaperlessCommand
|
from documents.management.commands.mixins import ProgressBarMixin
|
||||||
from documents.models import Document
|
from documents.models import Document
|
||||||
from documents.signals.handlers import set_correspondent
|
from documents.signals.handlers import set_correspondent
|
||||||
from documents.signals.handlers import set_document_type
|
from documents.signals.handlers import set_document_type
|
||||||
@@ -11,7 +14,7 @@ from documents.signals.handlers import set_tags
|
|||||||
logger = logging.getLogger("paperless.management.retagger")
|
logger = logging.getLogger("paperless.management.retagger")
|
||||||
|
|
||||||
|
|
||||||
class Command(PaperlessCommand):
|
class Command(ProgressBarMixin, BaseCommand):
|
||||||
help = (
|
help = (
|
||||||
"Using the current classification model, assigns correspondents, tags "
|
"Using the current classification model, assigns correspondents, tags "
|
||||||
"and document types to all documents, effectively allowing you to "
|
"and document types to all documents, effectively allowing you to "
|
||||||
@@ -20,7 +23,6 @@ class Command(PaperlessCommand):
|
|||||||
)
|
)
|
||||||
|
|
||||||
def add_arguments(self, parser):
|
def add_arguments(self, parser):
|
||||||
super().add_arguments(parser)
|
|
||||||
parser.add_argument("-c", "--correspondent", default=False, action="store_true")
|
parser.add_argument("-c", "--correspondent", default=False, action="store_true")
|
||||||
parser.add_argument("-T", "--tags", default=False, action="store_true")
|
parser.add_argument("-T", "--tags", default=False, action="store_true")
|
||||||
parser.add_argument("-t", "--document_type", default=False, action="store_true")
|
parser.add_argument("-t", "--document_type", default=False, action="store_true")
|
||||||
@@ -32,7 +34,7 @@ class Command(PaperlessCommand):
|
|||||||
action="store_true",
|
action="store_true",
|
||||||
help=(
|
help=(
|
||||||
"By default this command won't try to assign a correspondent "
|
"By default this command won't try to assign a correspondent "
|
||||||
"if more than one matches the document. Use this flag if "
|
"if more than one matches the document. Use this flag if "
|
||||||
"you'd rather it just pick the first one it finds."
|
"you'd rather it just pick the first one it finds."
|
||||||
),
|
),
|
||||||
)
|
)
|
||||||
@@ -47,6 +49,7 @@ class Command(PaperlessCommand):
|
|||||||
"and tags that do not match anymore due to changed rules."
|
"and tags that do not match anymore due to changed rules."
|
||||||
),
|
),
|
||||||
)
|
)
|
||||||
|
self.add_argument_progress_bar_mixin(parser)
|
||||||
parser.add_argument(
|
parser.add_argument(
|
||||||
"--suggest",
|
"--suggest",
|
||||||
default=False,
|
default=False,
|
||||||
@@ -65,6 +68,8 @@ class Command(PaperlessCommand):
|
|||||||
)
|
)
|
||||||
|
|
||||||
def handle(self, *args, **options):
|
def handle(self, *args, **options):
|
||||||
|
self.handle_progress_bar_mixin(**options)
|
||||||
|
|
||||||
if options["inbox_only"]:
|
if options["inbox_only"]:
|
||||||
queryset = Document.objects.filter(tags__is_inbox_tag=True)
|
queryset = Document.objects.filter(tags__is_inbox_tag=True)
|
||||||
else:
|
else:
|
||||||
@@ -79,7 +84,7 @@ class Command(PaperlessCommand):
|
|||||||
|
|
||||||
classifier = load_classifier()
|
classifier = load_classifier()
|
||||||
|
|
||||||
for document in self.track(documents, description="Retagging..."):
|
for document in tqdm.tqdm(documents, disable=self.no_progress_bar):
|
||||||
if options["correspondent"]:
|
if options["correspondent"]:
|
||||||
set_correspondent(
|
set_correspondent(
|
||||||
sender=None,
|
sender=None,
|
||||||
@@ -117,7 +122,6 @@ class Command(PaperlessCommand):
|
|||||||
stdout=self.stdout,
|
stdout=self.stdout,
|
||||||
style_func=self.style,
|
style_func=self.style,
|
||||||
)
|
)
|
||||||
|
|
||||||
if options["storage_path"]:
|
if options["storage_path"]:
|
||||||
set_storage_path(
|
set_storage_path(
|
||||||
sender=None,
|
sender=None,
|
||||||
|
|||||||
@@ -1,45 +1,43 @@
|
|||||||
import logging
|
import logging
|
||||||
|
import multiprocessing
|
||||||
import shutil
|
import shutil
|
||||||
|
|
||||||
from documents.management.commands.base import PaperlessCommand
|
import tqdm
|
||||||
|
from django import db
|
||||||
|
from django.core.management.base import BaseCommand
|
||||||
|
|
||||||
|
from documents.management.commands.mixins import MultiProcessMixin
|
||||||
|
from documents.management.commands.mixins import ProgressBarMixin
|
||||||
from documents.models import Document
|
from documents.models import Document
|
||||||
from documents.parsers import get_parser_class_for_mime_type
|
from documents.parsers import get_parser_class_for_mime_type
|
||||||
|
|
||||||
logger = logging.getLogger("paperless.management.thumbnails")
|
|
||||||
|
|
||||||
|
def _process_document(doc_id) -> None:
|
||||||
def _process_document(doc_id: int) -> None:
|
|
||||||
document: Document = Document.objects.get(id=doc_id)
|
document: Document = Document.objects.get(id=doc_id)
|
||||||
parser_class = get_parser_class_for_mime_type(document.mime_type)
|
parser_class = get_parser_class_for_mime_type(document.mime_type)
|
||||||
|
|
||||||
if parser_class is None:
|
if parser_class:
|
||||||
logger.warning(
|
parser = parser_class(logging_group=None)
|
||||||
"%s: No parser for mime type %s",
|
else:
|
||||||
document,
|
print(f"{document} No parser for mime type {document.mime_type}") # noqa: T201
|
||||||
document.mime_type,
|
|
||||||
)
|
|
||||||
return
|
return
|
||||||
|
|
||||||
parser = parser_class(logging_group=None)
|
|
||||||
|
|
||||||
try:
|
try:
|
||||||
thumb = parser.get_thumbnail(
|
thumb = parser.get_thumbnail(
|
||||||
document.source_path,
|
document.source_path,
|
||||||
document.mime_type,
|
document.mime_type,
|
||||||
document.get_public_filename(),
|
document.get_public_filename(),
|
||||||
)
|
)
|
||||||
|
|
||||||
shutil.move(thumb, document.thumbnail_path)
|
shutil.move(thumb, document.thumbnail_path)
|
||||||
finally:
|
finally:
|
||||||
parser.cleanup()
|
parser.cleanup()
|
||||||
|
|
||||||
|
|
||||||
class Command(PaperlessCommand):
|
class Command(MultiProcessMixin, ProgressBarMixin, BaseCommand):
|
||||||
help = "This will regenerate the thumbnails for all documents."
|
help = "This will regenerate the thumbnails for all documents."
|
||||||
|
|
||||||
supports_multiprocessing = True
|
|
||||||
|
|
||||||
def add_arguments(self, parser) -> None:
|
def add_arguments(self, parser) -> None:
|
||||||
super().add_arguments(parser)
|
|
||||||
parser.add_argument(
|
parser.add_argument(
|
||||||
"-d",
|
"-d",
|
||||||
"--document",
|
"--document",
|
||||||
@@ -51,23 +49,36 @@ class Command(PaperlessCommand):
|
|||||||
"run on this specific document."
|
"run on this specific document."
|
||||||
),
|
),
|
||||||
)
|
)
|
||||||
|
self.add_argument_progress_bar_mixin(parser)
|
||||||
|
self.add_argument_processes_mixin(parser)
|
||||||
|
|
||||||
def handle(self, *args, **options):
|
def handle(self, *args, **options):
|
||||||
logging.getLogger().handlers[0].level = logging.ERROR
|
logging.getLogger().handlers[0].level = logging.ERROR
|
||||||
|
|
||||||
|
self.handle_processes_mixin(**options)
|
||||||
|
self.handle_progress_bar_mixin(**options)
|
||||||
|
|
||||||
if options["document"]:
|
if options["document"]:
|
||||||
documents = Document.objects.filter(pk=options["document"])
|
documents = Document.objects.filter(pk=options["document"])
|
||||||
else:
|
else:
|
||||||
documents = Document.objects.all()
|
documents = Document.objects.all()
|
||||||
|
|
||||||
ids = list(documents.values_list("id", flat=True))
|
ids = [doc.id for doc in documents]
|
||||||
|
|
||||||
for result in self.process_parallel(
|
# Note to future self: this prevents django from reusing database
|
||||||
_process_document,
|
# connections between processes, which is bad and does not work
|
||||||
ids,
|
# with postgres.
|
||||||
description="Regenerating thumbnails...",
|
db.connections.close_all()
|
||||||
):
|
|
||||||
if result.error: # pragma: no cover
|
if self.process_count == 1:
|
||||||
self.console.print(
|
for doc_id in ids:
|
||||||
f"[red]Failed document {result.item}: {result.error}[/red]",
|
_process_document(doc_id)
|
||||||
|
else: # pragma: no cover
|
||||||
|
with multiprocessing.Pool(processes=self.process_count) as pool:
|
||||||
|
list(
|
||||||
|
tqdm.tqdm(
|
||||||
|
pool.imap_unordered(_process_document, ids),
|
||||||
|
total=len(ids),
|
||||||
|
disable=self.no_progress_bar,
|
||||||
|
),
|
||||||
)
|
)
|
||||||
|
|||||||
@@ -21,6 +21,26 @@ class CryptFields(TypedDict):
|
|||||||
fields: list[str]
|
fields: list[str]
|
||||||
|
|
||||||
|
|
||||||
|
class MultiProcessMixin:
|
||||||
|
"""
|
||||||
|
Small class to handle adding an argument and validating it
|
||||||
|
for the use of multiple processes
|
||||||
|
"""
|
||||||
|
|
||||||
|
def add_argument_processes_mixin(self, parser: ArgumentParser) -> None:
|
||||||
|
parser.add_argument(
|
||||||
|
"--processes",
|
||||||
|
default=max(1, os.cpu_count() // 4),
|
||||||
|
type=int,
|
||||||
|
help="Number of processes to distribute work amongst",
|
||||||
|
)
|
||||||
|
|
||||||
|
def handle_processes_mixin(self, *args, **options) -> None:
|
||||||
|
self.process_count = options["processes"]
|
||||||
|
if self.process_count < 1:
|
||||||
|
raise CommandError("There must be at least 1 process")
|
||||||
|
|
||||||
|
|
||||||
class ProgressBarMixin:
|
class ProgressBarMixin:
|
||||||
"""
|
"""
|
||||||
Many commands use a progress bar, which can be disabled
|
Many commands use a progress bar, which can be disabled
|
||||||
|
|||||||
@@ -1,21 +1,27 @@
|
|||||||
from auditlog.models import LogEntry
|
from auditlog.models import LogEntry
|
||||||
|
from django.core.management.base import BaseCommand
|
||||||
from django.db import transaction
|
from django.db import transaction
|
||||||
|
from tqdm import tqdm
|
||||||
|
|
||||||
from documents.management.commands.base import PaperlessCommand
|
from documents.management.commands.mixins import ProgressBarMixin
|
||||||
|
|
||||||
|
|
||||||
class Command(PaperlessCommand):
|
class Command(BaseCommand, ProgressBarMixin):
|
||||||
"""Prune the audit logs of objects that no longer exist."""
|
"""
|
||||||
|
Prune the audit logs of objects that no longer exist.
|
||||||
|
"""
|
||||||
|
|
||||||
help = "Prunes the audit logs of objects that no longer exist."
|
help = "Prunes the audit logs of objects that no longer exist."
|
||||||
|
|
||||||
def handle(self, *args, **options):
|
def add_arguments(self, parser):
|
||||||
|
self.add_argument_progress_bar_mixin(parser)
|
||||||
|
|
||||||
|
def handle(self, **options):
|
||||||
|
self.handle_progress_bar_mixin(**options)
|
||||||
with transaction.atomic():
|
with transaction.atomic():
|
||||||
for log_entry in self.track(
|
for log_entry in tqdm(LogEntry.objects.all(), disable=self.no_progress_bar):
|
||||||
LogEntry.objects.all(),
|
|
||||||
description="Pruning audit logs...",
|
|
||||||
):
|
|
||||||
model_class = log_entry.content_type.model_class()
|
model_class = log_entry.content_type.model_class()
|
||||||
|
# use global_objects for SoftDeleteModel
|
||||||
objects = (
|
objects = (
|
||||||
model_class.global_objects
|
model_class.global_objects
|
||||||
if hasattr(model_class, "global_objects")
|
if hasattr(model_class, "global_objects")
|
||||||
@@ -26,8 +32,8 @@ class Command(PaperlessCommand):
|
|||||||
and not objects.filter(pk=log_entry.object_id).exists()
|
and not objects.filter(pk=log_entry.object_id).exists()
|
||||||
):
|
):
|
||||||
log_entry.delete()
|
log_entry.delete()
|
||||||
self.console.print(
|
tqdm.write(
|
||||||
f"Deleted audit log entry for "
|
self.style.NOTICE(
|
||||||
f"{model_class.__name__} #{log_entry.object_id}",
|
f"Deleted audit log entry for {model_class.__name__} #{log_entry.object_id}",
|
||||||
style="yellow",
|
),
|
||||||
)
|
)
|
||||||
|
|||||||
@@ -9,7 +9,7 @@ from types import TracebackType
|
|||||||
try:
|
try:
|
||||||
from typing import Self
|
from typing import Self
|
||||||
except ImportError:
|
except ImportError:
|
||||||
from typing_extensions import Self
|
from typing import Self
|
||||||
|
|
||||||
import dateparser
|
import dateparser
|
||||||
|
|
||||||
|
|||||||
@@ -8,7 +8,7 @@ if TYPE_CHECKING:
|
|||||||
from channels_redis.pubsub import RedisPubSubChannelLayer
|
from channels_redis.pubsub import RedisPubSubChannelLayer
|
||||||
|
|
||||||
|
|
||||||
class ProgressStatusOptions(str, enum.Enum):
|
class ProgressStatusOptions(enum.StrEnum):
|
||||||
STARTED = "STARTED"
|
STARTED = "STARTED"
|
||||||
WORKING = "WORKING"
|
WORKING = "WORKING"
|
||||||
SUCCESS = "SUCCESS"
|
SUCCESS = "SUCCESS"
|
||||||
|
|||||||
@@ -80,7 +80,6 @@ from documents.parsers import is_mime_type_supported
|
|||||||
from documents.permissions import get_document_count_filter_for_user
|
from documents.permissions import get_document_count_filter_for_user
|
||||||
from documents.permissions import get_groups_with_only_permission
|
from documents.permissions import get_groups_with_only_permission
|
||||||
from documents.permissions import get_objects_for_user_owner_aware
|
from documents.permissions import get_objects_for_user_owner_aware
|
||||||
from documents.permissions import has_perms_owner_aware
|
|
||||||
from documents.permissions import set_permissions_for_object
|
from documents.permissions import set_permissions_for_object
|
||||||
from documents.regex import validate_regex_pattern
|
from documents.regex import validate_regex_pattern
|
||||||
from documents.templating.filepath import validate_filepath_template_and_render
|
from documents.templating.filepath import validate_filepath_template_and_render
|
||||||
@@ -2322,17 +2321,6 @@ class ShareLinkSerializer(OwnedObjectSerializer):
|
|||||||
validated_data["slug"] = get_random_string(50)
|
validated_data["slug"] = get_random_string(50)
|
||||||
return super().create(validated_data)
|
return super().create(validated_data)
|
||||||
|
|
||||||
def validate_document(self, document):
|
|
||||||
if self.user is not None and has_perms_owner_aware(
|
|
||||||
self.user,
|
|
||||||
"view_document",
|
|
||||||
document,
|
|
||||||
):
|
|
||||||
return document
|
|
||||||
raise PermissionDenied(
|
|
||||||
_("Insufficient permissions."),
|
|
||||||
)
|
|
||||||
|
|
||||||
|
|
||||||
class ShareLinkBundleSerializer(OwnedObjectSerializer):
|
class ShareLinkBundleSerializer(OwnedObjectSerializer):
|
||||||
document_ids = serializers.ListField(
|
document_ids = serializers.ListField(
|
||||||
|
|||||||
@@ -782,14 +782,6 @@ def run_workflows(
|
|||||||
"""
|
"""
|
||||||
|
|
||||||
use_overrides = overrides is not None
|
use_overrides = overrides is not None
|
||||||
|
|
||||||
if isinstance(document, Document) and document.root_document_id is not None:
|
|
||||||
logger.debug(
|
|
||||||
"Skipping workflow execution for version document %s",
|
|
||||||
document.pk,
|
|
||||||
)
|
|
||||||
return None
|
|
||||||
|
|
||||||
if original_file is None:
|
if original_file is None:
|
||||||
original_file = (
|
original_file = (
|
||||||
document.source_path if not use_overrides else document.original_file
|
document.source_path if not use_overrides else document.original_file
|
||||||
|
|||||||
@@ -452,22 +452,13 @@ def check_scheduled_workflows() -> None:
|
|||||||
|
|
||||||
match trigger.schedule_date_field:
|
match trigger.schedule_date_field:
|
||||||
case WorkflowTrigger.ScheduleDateField.ADDED:
|
case WorkflowTrigger.ScheduleDateField.ADDED:
|
||||||
documents = Document.objects.filter(
|
documents = Document.objects.filter(added__lte=threshold)
|
||||||
root_document__isnull=True,
|
|
||||||
added__lte=threshold,
|
|
||||||
)
|
|
||||||
|
|
||||||
case WorkflowTrigger.ScheduleDateField.CREATED:
|
case WorkflowTrigger.ScheduleDateField.CREATED:
|
||||||
documents = Document.objects.filter(
|
documents = Document.objects.filter(created__lte=threshold)
|
||||||
root_document__isnull=True,
|
|
||||||
created__lte=threshold,
|
|
||||||
)
|
|
||||||
|
|
||||||
case WorkflowTrigger.ScheduleDateField.MODIFIED:
|
case WorkflowTrigger.ScheduleDateField.MODIFIED:
|
||||||
documents = Document.objects.filter(
|
documents = Document.objects.filter(modified__lte=threshold)
|
||||||
root_document__isnull=True,
|
|
||||||
modified__lte=threshold,
|
|
||||||
)
|
|
||||||
|
|
||||||
case WorkflowTrigger.ScheduleDateField.CUSTOM_FIELD:
|
case WorkflowTrigger.ScheduleDateField.CUSTOM_FIELD:
|
||||||
# cap earliest date to avoid massive scans
|
# cap earliest date to avoid massive scans
|
||||||
@@ -505,10 +496,7 @@ def check_scheduled_workflows() -> None:
|
|||||||
)
|
)
|
||||||
]
|
]
|
||||||
|
|
||||||
documents = Document.objects.filter(
|
documents = Document.objects.filter(id__in=matched_ids)
|
||||||
root_document__isnull=True,
|
|
||||||
id__in=matched_ids,
|
|
||||||
)
|
|
||||||
|
|
||||||
if documents.count() > 0:
|
if documents.count() > 0:
|
||||||
documents = prefilter_documents_by_workflowtrigger(
|
documents = prefilter_documents_by_workflowtrigger(
|
||||||
|
|||||||
@@ -24,7 +24,7 @@ def base_config() -> DateParserConfig:
|
|||||||
12,
|
12,
|
||||||
0,
|
0,
|
||||||
0,
|
0,
|
||||||
tzinfo=datetime.timezone.utc,
|
tzinfo=datetime.UTC,
|
||||||
),
|
),
|
||||||
filename_date_order="YMD",
|
filename_date_order="YMD",
|
||||||
content_date_order="DMY",
|
content_date_order="DMY",
|
||||||
@@ -45,7 +45,7 @@ def config_with_ignore_dates() -> DateParserConfig:
|
|||||||
12,
|
12,
|
||||||
0,
|
0,
|
||||||
0,
|
0,
|
||||||
tzinfo=datetime.timezone.utc,
|
tzinfo=datetime.UTC,
|
||||||
),
|
),
|
||||||
filename_date_order="DMY",
|
filename_date_order="DMY",
|
||||||
content_date_order="MDY",
|
content_date_order="MDY",
|
||||||
|
|||||||
@@ -101,50 +101,50 @@ class TestFilterDate:
|
|||||||
[
|
[
|
||||||
# Valid Dates
|
# Valid Dates
|
||||||
pytest.param(
|
pytest.param(
|
||||||
datetime.datetime(2024, 1, 10, tzinfo=datetime.timezone.utc),
|
datetime.datetime(2024, 1, 10, tzinfo=datetime.UTC),
|
||||||
datetime.datetime(2024, 1, 10, tzinfo=datetime.timezone.utc),
|
datetime.datetime(2024, 1, 10, tzinfo=datetime.UTC),
|
||||||
id="valid_past_date",
|
id="valid_past_date",
|
||||||
),
|
),
|
||||||
pytest.param(
|
pytest.param(
|
||||||
datetime.datetime(2024, 1, 15, 12, 0, 0, tzinfo=datetime.timezone.utc),
|
datetime.datetime(2024, 1, 15, 12, 0, 0, tzinfo=datetime.UTC),
|
||||||
datetime.datetime(2024, 1, 15, 12, 0, 0, tzinfo=datetime.timezone.utc),
|
datetime.datetime(2024, 1, 15, 12, 0, 0, tzinfo=datetime.UTC),
|
||||||
id="exactly_at_reference",
|
id="exactly_at_reference",
|
||||||
),
|
),
|
||||||
pytest.param(
|
pytest.param(
|
||||||
datetime.datetime(1901, 1, 1, tzinfo=datetime.timezone.utc),
|
datetime.datetime(1901, 1, 1, tzinfo=datetime.UTC),
|
||||||
datetime.datetime(1901, 1, 1, tzinfo=datetime.timezone.utc),
|
datetime.datetime(1901, 1, 1, tzinfo=datetime.UTC),
|
||||||
id="year_1901_valid",
|
id="year_1901_valid",
|
||||||
),
|
),
|
||||||
# Date is > reference_time
|
# Date is > reference_time
|
||||||
pytest.param(
|
pytest.param(
|
||||||
datetime.datetime(2024, 1, 16, tzinfo=datetime.timezone.utc),
|
datetime.datetime(2024, 1, 16, tzinfo=datetime.UTC),
|
||||||
None,
|
None,
|
||||||
id="future_date_day_after",
|
id="future_date_day_after",
|
||||||
),
|
),
|
||||||
# date.date() in ignore_dates
|
# date.date() in ignore_dates
|
||||||
pytest.param(
|
pytest.param(
|
||||||
datetime.datetime(2024, 1, 1, 0, 0, 0, tzinfo=datetime.timezone.utc),
|
datetime.datetime(2024, 1, 1, 0, 0, 0, tzinfo=datetime.UTC),
|
||||||
None,
|
None,
|
||||||
id="ignored_date_midnight_jan1",
|
id="ignored_date_midnight_jan1",
|
||||||
),
|
),
|
||||||
pytest.param(
|
pytest.param(
|
||||||
datetime.datetime(2024, 1, 1, 10, 30, 0, tzinfo=datetime.timezone.utc),
|
datetime.datetime(2024, 1, 1, 10, 30, 0, tzinfo=datetime.UTC),
|
||||||
None,
|
None,
|
||||||
id="ignored_date_midday_jan1",
|
id="ignored_date_midday_jan1",
|
||||||
),
|
),
|
||||||
pytest.param(
|
pytest.param(
|
||||||
datetime.datetime(2024, 12, 25, 15, 0, 0, tzinfo=datetime.timezone.utc),
|
datetime.datetime(2024, 12, 25, 15, 0, 0, tzinfo=datetime.UTC),
|
||||||
None,
|
None,
|
||||||
id="ignored_date_dec25_future",
|
id="ignored_date_dec25_future",
|
||||||
),
|
),
|
||||||
# date.year <= 1900
|
# date.year <= 1900
|
||||||
pytest.param(
|
pytest.param(
|
||||||
datetime.datetime(1899, 12, 31, tzinfo=datetime.timezone.utc),
|
datetime.datetime(1899, 12, 31, tzinfo=datetime.UTC),
|
||||||
None,
|
None,
|
||||||
id="year_1899",
|
id="year_1899",
|
||||||
),
|
),
|
||||||
pytest.param(
|
pytest.param(
|
||||||
datetime.datetime(1900, 1, 1, tzinfo=datetime.timezone.utc),
|
datetime.datetime(1900, 1, 1, tzinfo=datetime.UTC),
|
||||||
None,
|
None,
|
||||||
id="year_1900_boundary",
|
id="year_1900_boundary",
|
||||||
),
|
),
|
||||||
@@ -176,7 +176,7 @@ class TestFilterDate:
|
|||||||
1,
|
1,
|
||||||
12,
|
12,
|
||||||
0,
|
0,
|
||||||
tzinfo=datetime.timezone.utc,
|
tzinfo=datetime.UTC,
|
||||||
)
|
)
|
||||||
another_ignored = datetime.datetime(
|
another_ignored = datetime.datetime(
|
||||||
2024,
|
2024,
|
||||||
@@ -184,7 +184,7 @@ class TestFilterDate:
|
|||||||
25,
|
25,
|
||||||
15,
|
15,
|
||||||
30,
|
30,
|
||||||
tzinfo=datetime.timezone.utc,
|
tzinfo=datetime.UTC,
|
||||||
)
|
)
|
||||||
allowed_date = datetime.datetime(
|
allowed_date = datetime.datetime(
|
||||||
2024,
|
2024,
|
||||||
@@ -192,7 +192,7 @@ class TestFilterDate:
|
|||||||
2,
|
2,
|
||||||
12,
|
12,
|
||||||
0,
|
0,
|
||||||
tzinfo=datetime.timezone.utc,
|
tzinfo=datetime.UTC,
|
||||||
)
|
)
|
||||||
|
|
||||||
assert parser._filter_date(ignored_date) is None
|
assert parser._filter_date(ignored_date) is None
|
||||||
@@ -204,7 +204,7 @@ class TestFilterDate:
|
|||||||
regex_parser: RegexDateParserPlugin,
|
regex_parser: RegexDateParserPlugin,
|
||||||
) -> None:
|
) -> None:
|
||||||
"""Should work with timezone-aware datetimes."""
|
"""Should work with timezone-aware datetimes."""
|
||||||
date_utc = datetime.datetime(2024, 1, 10, 12, 0, tzinfo=datetime.timezone.utc)
|
date_utc = datetime.datetime(2024, 1, 10, 12, 0, tzinfo=datetime.UTC)
|
||||||
|
|
||||||
result = regex_parser._filter_date(date_utc)
|
result = regex_parser._filter_date(date_utc)
|
||||||
|
|
||||||
@@ -221,8 +221,8 @@ class TestRegexDateParser:
|
|||||||
"report-2023-12-25.txt",
|
"report-2023-12-25.txt",
|
||||||
"Event recorded on 25/12/2022.",
|
"Event recorded on 25/12/2022.",
|
||||||
[
|
[
|
||||||
datetime.datetime(2023, 12, 25, tzinfo=datetime.timezone.utc),
|
datetime.datetime(2023, 12, 25, tzinfo=datetime.UTC),
|
||||||
datetime.datetime(2022, 12, 25, tzinfo=datetime.timezone.utc),
|
datetime.datetime(2022, 12, 25, tzinfo=datetime.UTC),
|
||||||
],
|
],
|
||||||
id="filename-y-m-d_and_content-d-m-y",
|
id="filename-y-m-d_and_content-d-m-y",
|
||||||
),
|
),
|
||||||
@@ -230,8 +230,8 @@ class TestRegexDateParser:
|
|||||||
"img_2023.01.02.jpg",
|
"img_2023.01.02.jpg",
|
||||||
"Taken on 01/02/2023",
|
"Taken on 01/02/2023",
|
||||||
[
|
[
|
||||||
datetime.datetime(2023, 1, 2, tzinfo=datetime.timezone.utc),
|
datetime.datetime(2023, 1, 2, tzinfo=datetime.UTC),
|
||||||
datetime.datetime(2023, 2, 1, tzinfo=datetime.timezone.utc),
|
datetime.datetime(2023, 2, 1, tzinfo=datetime.UTC),
|
||||||
],
|
],
|
||||||
id="ambiguous-dates-respect-orders",
|
id="ambiguous-dates-respect-orders",
|
||||||
),
|
),
|
||||||
@@ -239,7 +239,7 @@ class TestRegexDateParser:
|
|||||||
"notes.txt",
|
"notes.txt",
|
||||||
"bad date 99/99/9999 and 25/12/2022",
|
"bad date 99/99/9999 and 25/12/2022",
|
||||||
[
|
[
|
||||||
datetime.datetime(2022, 12, 25, tzinfo=datetime.timezone.utc),
|
datetime.datetime(2022, 12, 25, tzinfo=datetime.UTC),
|
||||||
],
|
],
|
||||||
id="parse-exception-skips-bad-and-yields-good",
|
id="parse-exception-skips-bad-and-yields-good",
|
||||||
),
|
),
|
||||||
@@ -275,24 +275,24 @@ class TestRegexDateParser:
|
|||||||
or "2023.12.25" in date_string
|
or "2023.12.25" in date_string
|
||||||
or "2023-12-25" in date_string
|
or "2023-12-25" in date_string
|
||||||
):
|
):
|
||||||
return datetime.datetime(2023, 12, 25, tzinfo=datetime.timezone.utc)
|
return datetime.datetime(2023, 12, 25, tzinfo=datetime.UTC)
|
||||||
|
|
||||||
# content DMY 25/12/2022
|
# content DMY 25/12/2022
|
||||||
if "25/12/2022" in date_string or "25-12-2022" in date_string:
|
if "25/12/2022" in date_string or "25-12-2022" in date_string:
|
||||||
return datetime.datetime(2022, 12, 25, tzinfo=datetime.timezone.utc)
|
return datetime.datetime(2022, 12, 25, tzinfo=datetime.UTC)
|
||||||
|
|
||||||
# filename YMD 2023.01.02
|
# filename YMD 2023.01.02
|
||||||
if "2023.01.02" in date_string or "2023-01-02" in date_string:
|
if "2023.01.02" in date_string or "2023-01-02" in date_string:
|
||||||
return datetime.datetime(2023, 1, 2, tzinfo=datetime.timezone.utc)
|
return datetime.datetime(2023, 1, 2, tzinfo=datetime.UTC)
|
||||||
|
|
||||||
# ambiguous 01/02/2023 -> respect DATE_ORDER setting
|
# ambiguous 01/02/2023 -> respect DATE_ORDER setting
|
||||||
if "01/02/2023" in date_string:
|
if "01/02/2023" in date_string:
|
||||||
if date_order == "DMY":
|
if date_order == "DMY":
|
||||||
return datetime.datetime(2023, 2, 1, tzinfo=datetime.timezone.utc)
|
return datetime.datetime(2023, 2, 1, tzinfo=datetime.UTC)
|
||||||
if date_order == "YMD":
|
if date_order == "YMD":
|
||||||
return datetime.datetime(2023, 1, 2, tzinfo=datetime.timezone.utc)
|
return datetime.datetime(2023, 1, 2, tzinfo=datetime.UTC)
|
||||||
# fallback
|
# fallback
|
||||||
return datetime.datetime(2023, 2, 1, tzinfo=datetime.timezone.utc)
|
return datetime.datetime(2023, 2, 1, tzinfo=datetime.UTC)
|
||||||
|
|
||||||
# simulate parse failure for malformed input
|
# simulate parse failure for malformed input
|
||||||
if "99/99/9999" in date_string or "bad date" in date_string:
|
if "99/99/9999" in date_string or "bad date" in date_string:
|
||||||
@@ -328,7 +328,7 @@ class TestRegexDateParser:
|
|||||||
12,
|
12,
|
||||||
0,
|
0,
|
||||||
0,
|
0,
|
||||||
tzinfo=datetime.timezone.utc,
|
tzinfo=datetime.UTC,
|
||||||
),
|
),
|
||||||
filename_date_order="YMD",
|
filename_date_order="YMD",
|
||||||
content_date_order="DMY",
|
content_date_order="DMY",
|
||||||
@@ -344,13 +344,13 @@ class TestRegexDateParser:
|
|||||||
) -> datetime.datetime | None:
|
) -> datetime.datetime | None:
|
||||||
if "10/12/2023" in date_string or "10-12-2023" in date_string:
|
if "10/12/2023" in date_string or "10-12-2023" in date_string:
|
||||||
# ignored date
|
# ignored date
|
||||||
return datetime.datetime(2023, 12, 10, tzinfo=datetime.timezone.utc)
|
return datetime.datetime(2023, 12, 10, tzinfo=datetime.UTC)
|
||||||
if "01/02/2024" in date_string or "01-02-2024" in date_string:
|
if "01/02/2024" in date_string or "01-02-2024" in date_string:
|
||||||
# future relative to reference_time -> filtered
|
# future relative to reference_time -> filtered
|
||||||
return datetime.datetime(2024, 2, 1, tzinfo=datetime.timezone.utc)
|
return datetime.datetime(2024, 2, 1, tzinfo=datetime.UTC)
|
||||||
if "05/01/2023" in date_string or "05-01-2023" in date_string:
|
if "05/01/2023" in date_string or "05-01-2023" in date_string:
|
||||||
# valid
|
# valid
|
||||||
return datetime.datetime(2023, 1, 5, tzinfo=datetime.timezone.utc)
|
return datetime.datetime(2023, 1, 5, tzinfo=datetime.UTC)
|
||||||
return None
|
return None
|
||||||
|
|
||||||
mocker.patch(target, side_effect=fake_parse)
|
mocker.patch(target, side_effect=fake_parse)
|
||||||
@@ -358,7 +358,7 @@ class TestRegexDateParser:
|
|||||||
content = "Ignored: 10/12/2023, Future: 01/02/2024, Keep: 05/01/2023"
|
content = "Ignored: 10/12/2023, Future: 01/02/2024, Keep: 05/01/2023"
|
||||||
results = list(parser.parse("whatever.txt", content))
|
results = list(parser.parse("whatever.txt", content))
|
||||||
|
|
||||||
assert results == [datetime.datetime(2023, 1, 5, tzinfo=datetime.timezone.utc)]
|
assert results == [datetime.datetime(2023, 1, 5, tzinfo=datetime.UTC)]
|
||||||
|
|
||||||
def test_parse_handles_no_matches_and_returns_empty_list(
|
def test_parse_handles_no_matches_and_returns_empty_list(
|
||||||
self,
|
self,
|
||||||
@@ -392,7 +392,7 @@ class TestRegexDateParser:
|
|||||||
12,
|
12,
|
||||||
0,
|
0,
|
||||||
0,
|
0,
|
||||||
tzinfo=datetime.timezone.utc,
|
tzinfo=datetime.UTC,
|
||||||
),
|
),
|
||||||
filename_date_order=None,
|
filename_date_order=None,
|
||||||
content_date_order="DMY",
|
content_date_order="DMY",
|
||||||
@@ -409,9 +409,9 @@ class TestRegexDateParser:
|
|||||||
) -> datetime.datetime | None:
|
) -> datetime.datetime | None:
|
||||||
# return distinct datetimes so we can tell which source was parsed
|
# return distinct datetimes so we can tell which source was parsed
|
||||||
if "25/12/2022" in date_string:
|
if "25/12/2022" in date_string:
|
||||||
return datetime.datetime(2022, 12, 25, tzinfo=datetime.timezone.utc)
|
return datetime.datetime(2022, 12, 25, tzinfo=datetime.UTC)
|
||||||
if "2023-12-25" in date_string:
|
if "2023-12-25" in date_string:
|
||||||
return datetime.datetime(2023, 12, 25, tzinfo=datetime.timezone.utc)
|
return datetime.datetime(2023, 12, 25, tzinfo=datetime.UTC)
|
||||||
return None
|
return None
|
||||||
|
|
||||||
mock = mocker.patch(target, side_effect=fake_parse)
|
mock = mocker.patch(target, side_effect=fake_parse)
|
||||||
@@ -429,5 +429,5 @@ class TestRegexDateParser:
|
|||||||
assert "25/12/2022" in called_date_string
|
assert "25/12/2022" in called_date_string
|
||||||
# And the parser should have yielded the corresponding datetime
|
# And the parser should have yielded the corresponding datetime
|
||||||
assert results == [
|
assert results == [
|
||||||
datetime.datetime(2022, 12, 25, tzinfo=datetime.timezone.utc),
|
datetime.datetime(2022, 12, 25, tzinfo=datetime.UTC),
|
||||||
]
|
]
|
||||||
|
|||||||
@@ -1,518 +0,0 @@
|
|||||||
"""Tests for PaperlessCommand base class."""
|
|
||||||
|
|
||||||
from __future__ import annotations
|
|
||||||
|
|
||||||
import io
|
|
||||||
from typing import TYPE_CHECKING
|
|
||||||
|
|
||||||
import pytest
|
|
||||||
from django.core.management import CommandError
|
|
||||||
from django.db.models import QuerySet
|
|
||||||
from rich.console import Console
|
|
||||||
|
|
||||||
from documents.management.commands.base import PaperlessCommand
|
|
||||||
from documents.management.commands.base import ProcessResult
|
|
||||||
|
|
||||||
if TYPE_CHECKING:
|
|
||||||
from pytest_mock import MockerFixture
|
|
||||||
|
|
||||||
|
|
||||||
# --- Test Commands ---
|
|
||||||
# These simulate real command implementations for testing
|
|
||||||
|
|
||||||
|
|
||||||
class SimpleCommand(PaperlessCommand):
|
|
||||||
"""Command with default settings (progress bar, no multiprocessing)."""
|
|
||||||
|
|
||||||
help = "Simple test command"
|
|
||||||
|
|
||||||
def handle(self, *args, **options):
|
|
||||||
items = list(range(5))
|
|
||||||
results = []
|
|
||||||
for item in self.track(items, description="Processing..."):
|
|
||||||
results.append(item * 2)
|
|
||||||
self.stdout.write(f"Results: {results}")
|
|
||||||
|
|
||||||
|
|
||||||
class NoProgressBarCommand(PaperlessCommand):
|
|
||||||
"""Command with progress bar disabled."""
|
|
||||||
|
|
||||||
help = "No progress bar command"
|
|
||||||
supports_progress_bar = False
|
|
||||||
|
|
||||||
def handle(self, *args, **options):
|
|
||||||
items = list(range(3))
|
|
||||||
for _ in self.track(items):
|
|
||||||
# We don't need to actually work
|
|
||||||
pass
|
|
||||||
self.stdout.write("Done")
|
|
||||||
|
|
||||||
|
|
||||||
class MultiprocessCommand(PaperlessCommand):
|
|
||||||
"""Command with multiprocessing support."""
|
|
||||||
|
|
||||||
help = "Multiprocess test command"
|
|
||||||
supports_multiprocessing = True
|
|
||||||
|
|
||||||
def handle(self, *args, **options):
|
|
||||||
items = list(range(5))
|
|
||||||
results = []
|
|
||||||
for result in self.process_parallel(
|
|
||||||
_double_value,
|
|
||||||
items,
|
|
||||||
description="Processing...",
|
|
||||||
):
|
|
||||||
results.append(result)
|
|
||||||
successes = sum(1 for r in results if r.success)
|
|
||||||
self.stdout.write(f"Successes: {successes}")
|
|
||||||
|
|
||||||
|
|
||||||
# --- Helper Functions for Multiprocessing ---
|
|
||||||
# Must be at module level to be picklable
|
|
||||||
|
|
||||||
|
|
||||||
def _double_value(x: int) -> int:
|
|
||||||
"""Double the input value."""
|
|
||||||
return x * 2
|
|
||||||
|
|
||||||
|
|
||||||
def _divide_ten_by(x: int) -> float:
|
|
||||||
"""Divide 10 by x. Raises ZeroDivisionError if x is 0."""
|
|
||||||
return 10 / x
|
|
||||||
|
|
||||||
|
|
||||||
# --- Fixtures ---
|
|
||||||
|
|
||||||
|
|
||||||
@pytest.fixture
|
|
||||||
def console() -> Console:
|
|
||||||
"""Create a non-interactive console for testing."""
|
|
||||||
return Console(force_terminal=False, force_interactive=False)
|
|
||||||
|
|
||||||
|
|
||||||
@pytest.fixture
|
|
||||||
def simple_command(console: Console) -> SimpleCommand:
|
|
||||||
"""Create a SimpleCommand instance configured for testing."""
|
|
||||||
command = SimpleCommand()
|
|
||||||
command.stdout = io.StringIO()
|
|
||||||
command.stderr = io.StringIO()
|
|
||||||
command.console = console
|
|
||||||
command.no_progress_bar = True
|
|
||||||
command.process_count = 1
|
|
||||||
return command
|
|
||||||
|
|
||||||
|
|
||||||
@pytest.fixture
|
|
||||||
def multiprocess_command(console: Console) -> MultiprocessCommand:
|
|
||||||
"""Create a MultiprocessCommand instance configured for testing."""
|
|
||||||
command = MultiprocessCommand()
|
|
||||||
command.stdout = io.StringIO()
|
|
||||||
command.stderr = io.StringIO()
|
|
||||||
command.console = console
|
|
||||||
command.no_progress_bar = True
|
|
||||||
command.process_count = 1
|
|
||||||
return command
|
|
||||||
|
|
||||||
|
|
||||||
@pytest.fixture
|
|
||||||
def mock_queryset():
|
|
||||||
"""
|
|
||||||
Create a mock Django QuerySet that tracks method calls.
|
|
||||||
|
|
||||||
This verifies we use .count() instead of len() for querysets.
|
|
||||||
"""
|
|
||||||
|
|
||||||
class MockQuerySet(QuerySet):
|
|
||||||
def __init__(self, items: list):
|
|
||||||
self._items = items
|
|
||||||
self.count_called = False
|
|
||||||
|
|
||||||
def count(self) -> int:
|
|
||||||
self.count_called = True
|
|
||||||
return len(self._items)
|
|
||||||
|
|
||||||
def __iter__(self):
|
|
||||||
return iter(self._items)
|
|
||||||
|
|
||||||
def __len__(self):
|
|
||||||
raise AssertionError("len() should not be called on querysets")
|
|
||||||
|
|
||||||
return MockQuerySet
|
|
||||||
|
|
||||||
|
|
||||||
# --- Test Classes ---
|
|
||||||
|
|
||||||
|
|
||||||
@pytest.mark.management
|
|
||||||
class TestProcessResult:
|
|
||||||
"""Tests for the ProcessResult dataclass."""
|
|
||||||
|
|
||||||
def test_success_result(self):
|
|
||||||
result = ProcessResult(item=1, result=2, error=None)
|
|
||||||
|
|
||||||
assert result.item == 1
|
|
||||||
assert result.result == 2
|
|
||||||
assert result.error is None
|
|
||||||
assert result.success is True
|
|
||||||
|
|
||||||
def test_error_result(self):
|
|
||||||
error = ValueError("test error")
|
|
||||||
result = ProcessResult(item=1, result=None, error=error)
|
|
||||||
|
|
||||||
assert result.item == 1
|
|
||||||
assert result.result is None
|
|
||||||
assert result.error is error
|
|
||||||
assert result.success is False
|
|
||||||
|
|
||||||
|
|
||||||
@pytest.mark.management
|
|
||||||
class TestPaperlessCommandArguments:
|
|
||||||
"""Tests for argument parsing behavior."""
|
|
||||||
|
|
||||||
def test_progress_bar_argument_added_by_default(self):
|
|
||||||
command = SimpleCommand()
|
|
||||||
parser = command.create_parser("manage.py", "simple")
|
|
||||||
|
|
||||||
options = parser.parse_args(["--no-progress-bar"])
|
|
||||||
assert options.no_progress_bar is True
|
|
||||||
|
|
||||||
options = parser.parse_args([])
|
|
||||||
assert options.no_progress_bar is False
|
|
||||||
|
|
||||||
def test_progress_bar_argument_not_added_when_disabled(self):
|
|
||||||
command = NoProgressBarCommand()
|
|
||||||
parser = command.create_parser("manage.py", "noprogress")
|
|
||||||
|
|
||||||
options = parser.parse_args([])
|
|
||||||
assert not hasattr(options, "no_progress_bar")
|
|
||||||
|
|
||||||
def test_processes_argument_added_when_multiprocessing_enabled(self):
|
|
||||||
command = MultiprocessCommand()
|
|
||||||
parser = command.create_parser("manage.py", "multiprocess")
|
|
||||||
|
|
||||||
options = parser.parse_args(["--processes", "4"])
|
|
||||||
assert options.processes == 4
|
|
||||||
|
|
||||||
options = parser.parse_args([])
|
|
||||||
assert options.processes >= 1
|
|
||||||
|
|
||||||
def test_processes_argument_not_added_when_multiprocessing_disabled(self):
|
|
||||||
command = SimpleCommand()
|
|
||||||
parser = command.create_parser("manage.py", "simple")
|
|
||||||
|
|
||||||
options = parser.parse_args([])
|
|
||||||
assert not hasattr(options, "processes")
|
|
||||||
|
|
||||||
|
|
||||||
@pytest.mark.management
|
|
||||||
class TestPaperlessCommandExecute:
|
|
||||||
"""Tests for the execute() setup behavior."""
|
|
||||||
|
|
||||||
@pytest.fixture
|
|
||||||
def base_options(self) -> dict:
|
|
||||||
"""Base options required for execute()."""
|
|
||||||
return {
|
|
||||||
"verbosity": 1,
|
|
||||||
"no_color": True,
|
|
||||||
"force_color": False,
|
|
||||||
"skip_checks": True,
|
|
||||||
}
|
|
||||||
|
|
||||||
@pytest.mark.parametrize(
|
|
||||||
("no_progress_bar_flag", "expected"),
|
|
||||||
[
|
|
||||||
pytest.param(False, False, id="progress-bar-enabled"),
|
|
||||||
pytest.param(True, True, id="progress-bar-disabled"),
|
|
||||||
],
|
|
||||||
)
|
|
||||||
def test_no_progress_bar_state_set(
|
|
||||||
self,
|
|
||||||
base_options: dict,
|
|
||||||
*,
|
|
||||||
no_progress_bar_flag: bool,
|
|
||||||
expected: bool,
|
|
||||||
):
|
|
||||||
command = SimpleCommand()
|
|
||||||
command.stdout = io.StringIO()
|
|
||||||
command.stderr = io.StringIO()
|
|
||||||
|
|
||||||
options = {**base_options, "no_progress_bar": no_progress_bar_flag}
|
|
||||||
command.execute(**options)
|
|
||||||
|
|
||||||
assert command.no_progress_bar is expected
|
|
||||||
|
|
||||||
def test_no_progress_bar_always_true_when_not_supported(self, base_options: dict):
|
|
||||||
command = NoProgressBarCommand()
|
|
||||||
command.stdout = io.StringIO()
|
|
||||||
command.stderr = io.StringIO()
|
|
||||||
|
|
||||||
command.execute(**base_options)
|
|
||||||
|
|
||||||
assert command.no_progress_bar is True
|
|
||||||
|
|
||||||
@pytest.mark.parametrize(
|
|
||||||
("processes", "expected"),
|
|
||||||
[
|
|
||||||
pytest.param(1, 1, id="single-process"),
|
|
||||||
pytest.param(4, 4, id="four-processes"),
|
|
||||||
],
|
|
||||||
)
|
|
||||||
def test_process_count_set(
|
|
||||||
self,
|
|
||||||
base_options: dict,
|
|
||||||
processes: int,
|
|
||||||
expected: int,
|
|
||||||
):
|
|
||||||
command = MultiprocessCommand()
|
|
||||||
command.stdout = io.StringIO()
|
|
||||||
command.stderr = io.StringIO()
|
|
||||||
|
|
||||||
options = {**base_options, "processes": processes, "no_progress_bar": True}
|
|
||||||
command.execute(**options)
|
|
||||||
|
|
||||||
assert command.process_count == expected
|
|
||||||
|
|
||||||
@pytest.mark.parametrize(
|
|
||||||
"invalid_count",
|
|
||||||
[
|
|
||||||
pytest.param(0, id="zero"),
|
|
||||||
pytest.param(-1, id="negative"),
|
|
||||||
],
|
|
||||||
)
|
|
||||||
def test_process_count_validation_rejects_invalid(
|
|
||||||
self,
|
|
||||||
base_options: dict,
|
|
||||||
invalid_count: int,
|
|
||||||
):
|
|
||||||
command = MultiprocessCommand()
|
|
||||||
command.stdout = io.StringIO()
|
|
||||||
command.stderr = io.StringIO()
|
|
||||||
|
|
||||||
options = {**base_options, "processes": invalid_count, "no_progress_bar": True}
|
|
||||||
|
|
||||||
with pytest.raises(CommandError, match="--processes must be at least 1"):
|
|
||||||
command.execute(**options)
|
|
||||||
|
|
||||||
def test_process_count_defaults_to_one_when_not_supported(self, base_options: dict):
|
|
||||||
command = SimpleCommand()
|
|
||||||
command.stdout = io.StringIO()
|
|
||||||
command.stderr = io.StringIO()
|
|
||||||
|
|
||||||
options = {**base_options, "no_progress_bar": True}
|
|
||||||
command.execute(**options)
|
|
||||||
|
|
||||||
assert command.process_count == 1
|
|
||||||
|
|
||||||
|
|
||||||
@pytest.mark.management
|
|
||||||
class TestGetIterableLength:
|
|
||||||
"""Tests for the _get_iterable_length() method."""
|
|
||||||
|
|
||||||
def test_uses_count_for_querysets(
|
|
||||||
self,
|
|
||||||
simple_command: SimpleCommand,
|
|
||||||
mock_queryset,
|
|
||||||
):
|
|
||||||
"""Should call .count() on Django querysets rather than len()."""
|
|
||||||
queryset = mock_queryset([1, 2, 3, 4, 5])
|
|
||||||
|
|
||||||
result = simple_command._get_iterable_length(queryset)
|
|
||||||
|
|
||||||
assert result == 5
|
|
||||||
assert queryset.count_called is True
|
|
||||||
|
|
||||||
def test_uses_len_for_sized(self, simple_command: SimpleCommand):
|
|
||||||
"""Should use len() for sequences and other Sized types."""
|
|
||||||
result = simple_command._get_iterable_length([1, 2, 3, 4])
|
|
||||||
|
|
||||||
assert result == 4
|
|
||||||
|
|
||||||
def test_returns_none_for_unsized_iterables(self, simple_command: SimpleCommand):
|
|
||||||
"""Should return None for generators and other iterables without len()."""
|
|
||||||
result = simple_command._get_iterable_length(x for x in [1, 2, 3])
|
|
||||||
|
|
||||||
assert result is None
|
|
||||||
|
|
||||||
|
|
||||||
@pytest.mark.management
|
|
||||||
class TestTrack:
|
|
||||||
"""Tests for the track() method."""
|
|
||||||
|
|
||||||
def test_with_progress_bar_disabled(self, simple_command: SimpleCommand):
|
|
||||||
simple_command.no_progress_bar = True
|
|
||||||
items = ["a", "b", "c"]
|
|
||||||
|
|
||||||
result = list(simple_command.track(items, description="Test..."))
|
|
||||||
|
|
||||||
assert result == items
|
|
||||||
|
|
||||||
def test_with_progress_bar_enabled(self, simple_command: SimpleCommand):
|
|
||||||
simple_command.no_progress_bar = False
|
|
||||||
items = [1, 2, 3]
|
|
||||||
|
|
||||||
result = list(simple_command.track(items, description="Processing..."))
|
|
||||||
|
|
||||||
assert result == items
|
|
||||||
|
|
||||||
def test_with_explicit_total(self, simple_command: SimpleCommand):
|
|
||||||
simple_command.no_progress_bar = False
|
|
||||||
|
|
||||||
def gen():
|
|
||||||
yield from [1, 2, 3]
|
|
||||||
|
|
||||||
result = list(simple_command.track(gen(), total=3))
|
|
||||||
|
|
||||||
assert result == [1, 2, 3]
|
|
||||||
|
|
||||||
def test_with_generator_no_total(self, simple_command: SimpleCommand):
|
|
||||||
def gen():
|
|
||||||
yield from [1, 2, 3]
|
|
||||||
|
|
||||||
result = list(simple_command.track(gen()))
|
|
||||||
|
|
||||||
assert result == [1, 2, 3]
|
|
||||||
|
|
||||||
def test_empty_iterable(self, simple_command: SimpleCommand):
|
|
||||||
result = list(simple_command.track([]))
|
|
||||||
|
|
||||||
assert result == []
|
|
||||||
|
|
||||||
def test_uses_queryset_count(
|
|
||||||
self,
|
|
||||||
simple_command: SimpleCommand,
|
|
||||||
mock_queryset,
|
|
||||||
mocker: MockerFixture,
|
|
||||||
):
|
|
||||||
"""Verify track() uses .count() for querysets."""
|
|
||||||
simple_command.no_progress_bar = False
|
|
||||||
queryset = mock_queryset([1, 2, 3])
|
|
||||||
|
|
||||||
spy = mocker.spy(simple_command, "_get_iterable_length")
|
|
||||||
|
|
||||||
result = list(simple_command.track(queryset))
|
|
||||||
|
|
||||||
assert result == [1, 2, 3]
|
|
||||||
spy.assert_called_once_with(queryset)
|
|
||||||
assert queryset.count_called is True
|
|
||||||
|
|
||||||
|
|
||||||
@pytest.mark.management
|
|
||||||
class TestProcessParallel:
|
|
||||||
"""Tests for the process_parallel() method."""
|
|
||||||
|
|
||||||
def test_sequential_processing_single_process(
|
|
||||||
self,
|
|
||||||
multiprocess_command: MultiprocessCommand,
|
|
||||||
):
|
|
||||||
multiprocess_command.process_count = 1
|
|
||||||
items = [1, 2, 3, 4, 5]
|
|
||||||
|
|
||||||
results = list(multiprocess_command.process_parallel(_double_value, items))
|
|
||||||
|
|
||||||
assert len(results) == 5
|
|
||||||
assert all(r.success for r in results)
|
|
||||||
|
|
||||||
result_map = {r.item: r.result for r in results}
|
|
||||||
assert result_map == {1: 2, 2: 4, 3: 6, 4: 8, 5: 10}
|
|
||||||
|
|
||||||
def test_sequential_processing_handles_errors(
|
|
||||||
self,
|
|
||||||
multiprocess_command: MultiprocessCommand,
|
|
||||||
):
|
|
||||||
multiprocess_command.process_count = 1
|
|
||||||
items = [1, 2, 0, 4] # 0 causes ZeroDivisionError
|
|
||||||
|
|
||||||
results = list(multiprocess_command.process_parallel(_divide_ten_by, items))
|
|
||||||
|
|
||||||
assert len(results) == 4
|
|
||||||
|
|
||||||
successes = [r for r in results if r.success]
|
|
||||||
failures = [r for r in results if not r.success]
|
|
||||||
|
|
||||||
assert len(successes) == 3
|
|
||||||
assert len(failures) == 1
|
|
||||||
assert failures[0].item == 0
|
|
||||||
assert isinstance(failures[0].error, ZeroDivisionError)
|
|
||||||
|
|
||||||
def test_parallel_closes_db_connections(
|
|
||||||
self,
|
|
||||||
multiprocess_command: MultiprocessCommand,
|
|
||||||
mocker: MockerFixture,
|
|
||||||
):
|
|
||||||
multiprocess_command.process_count = 2
|
|
||||||
items = [1, 2, 3]
|
|
||||||
|
|
||||||
mock_connections = mocker.patch(
|
|
||||||
"documents.management.commands.base.db.connections",
|
|
||||||
)
|
|
||||||
|
|
||||||
results = list(multiprocess_command.process_parallel(_double_value, items))
|
|
||||||
|
|
||||||
mock_connections.close_all.assert_called_once()
|
|
||||||
assert len(results) == 3
|
|
||||||
|
|
||||||
def test_parallel_processing_handles_errors(
|
|
||||||
self,
|
|
||||||
multiprocess_command: MultiprocessCommand,
|
|
||||||
mocker: MockerFixture,
|
|
||||||
):
|
|
||||||
multiprocess_command.process_count = 2
|
|
||||||
items = [1, 2, 0, 4]
|
|
||||||
|
|
||||||
mocker.patch("documents.management.commands.base.db.connections")
|
|
||||||
|
|
||||||
results = list(multiprocess_command.process_parallel(_divide_ten_by, items))
|
|
||||||
|
|
||||||
failures = [r for r in results if not r.success]
|
|
||||||
assert len(failures) == 1
|
|
||||||
assert failures[0].item == 0
|
|
||||||
|
|
||||||
def test_empty_items(self, multiprocess_command: MultiprocessCommand):
|
|
||||||
results = list(multiprocess_command.process_parallel(_double_value, []))
|
|
||||||
|
|
||||||
assert results == []
|
|
||||||
|
|
||||||
def test_result_contains_original_item(
|
|
||||||
self,
|
|
||||||
multiprocess_command: MultiprocessCommand,
|
|
||||||
):
|
|
||||||
items = [10, 20, 30]
|
|
||||||
|
|
||||||
results = list(multiprocess_command.process_parallel(_double_value, items))
|
|
||||||
|
|
||||||
for result in results:
|
|
||||||
assert result.item in items
|
|
||||||
assert result.result == result.item * 2
|
|
||||||
|
|
||||||
def test_sequential_path_used_for_single_process(
|
|
||||||
self,
|
|
||||||
multiprocess_command: MultiprocessCommand,
|
|
||||||
mocker: MockerFixture,
|
|
||||||
):
|
|
||||||
"""Verify single process uses sequential path (important for testing)."""
|
|
||||||
multiprocess_command.process_count = 1
|
|
||||||
|
|
||||||
spy_sequential = mocker.spy(multiprocess_command, "_process_sequential")
|
|
||||||
spy_parallel = mocker.spy(multiprocess_command, "_process_parallel")
|
|
||||||
|
|
||||||
list(multiprocess_command.process_parallel(_double_value, [1, 2, 3]))
|
|
||||||
|
|
||||||
spy_sequential.assert_called_once()
|
|
||||||
spy_parallel.assert_not_called()
|
|
||||||
|
|
||||||
def test_parallel_path_used_for_multiple_processes(
|
|
||||||
self,
|
|
||||||
multiprocess_command: MultiprocessCommand,
|
|
||||||
mocker: MockerFixture,
|
|
||||||
):
|
|
||||||
"""Verify multiple processes uses parallel path."""
|
|
||||||
multiprocess_command.process_count = 2
|
|
||||||
|
|
||||||
mocker.patch("documents.management.commands.base.db.connections")
|
|
||||||
spy_sequential = mocker.spy(multiprocess_command, "_process_sequential")
|
|
||||||
spy_parallel = mocker.spy(multiprocess_command, "_process_parallel")
|
|
||||||
|
|
||||||
list(multiprocess_command.process_parallel(_double_value, [1, 2, 3]))
|
|
||||||
|
|
||||||
spy_parallel.assert_called_once()
|
|
||||||
spy_sequential.assert_not_called()
|
|
||||||
@@ -773,22 +773,6 @@ class TestBulkEditAPI(DirectoriesMixin, APITestCase):
|
|||||||
],
|
],
|
||||||
)
|
)
|
||||||
|
|
||||||
def test_api_selection_data_requires_view_permission(self):
|
|
||||||
self.doc2.owner = self.user
|
|
||||||
self.doc2.save()
|
|
||||||
|
|
||||||
user1 = User.objects.create(username="user1")
|
|
||||||
self.client.force_authenticate(user=user1)
|
|
||||||
|
|
||||||
response = self.client.post(
|
|
||||||
"/api/documents/selection_data/",
|
|
||||||
json.dumps({"documents": [self.doc2.id]}),
|
|
||||||
content_type="application/json",
|
|
||||||
)
|
|
||||||
|
|
||||||
self.assertEqual(response.status_code, status.HTTP_403_FORBIDDEN)
|
|
||||||
self.assertEqual(response.content, b"Insufficient permissions")
|
|
||||||
|
|
||||||
@mock.patch("documents.serialisers.bulk_edit.set_permissions")
|
@mock.patch("documents.serialisers.bulk_edit.set_permissions")
|
||||||
def test_set_permissions(self, m) -> None:
|
def test_set_permissions(self, m) -> None:
|
||||||
self.setup_mock(m, "set_permissions")
|
self.setup_mock(m, "set_permissions")
|
||||||
|
|||||||
@@ -2955,54 +2955,6 @@ class TestDocumentApi(DirectoriesMixin, DocumentConsumeDelayMixin, APITestCase):
|
|||||||
)
|
)
|
||||||
self.assertEqual(resp.status_code, status.HTTP_200_OK)
|
self.assertEqual(resp.status_code, status.HTTP_200_OK)
|
||||||
|
|
||||||
def test_create_share_link_requires_view_permission_for_document(self):
|
|
||||||
"""
|
|
||||||
GIVEN:
|
|
||||||
- A user with add_sharelink but without view permission on a document
|
|
||||||
WHEN:
|
|
||||||
- API request is made to create a share link for that document
|
|
||||||
THEN:
|
|
||||||
- Share link creation is denied until view permission is granted
|
|
||||||
"""
|
|
||||||
user1 = User.objects.create_user(username="test1")
|
|
||||||
user1.user_permissions.add(*Permission.objects.filter(codename="add_sharelink"))
|
|
||||||
user1.save()
|
|
||||||
|
|
||||||
user2 = User.objects.create_user(username="test2")
|
|
||||||
user2.save()
|
|
||||||
|
|
||||||
doc = Document.objects.create(
|
|
||||||
title="test",
|
|
||||||
mime_type="application/pdf",
|
|
||||||
content="this is a document which will be protected",
|
|
||||||
owner=user2,
|
|
||||||
)
|
|
||||||
|
|
||||||
self.client.force_authenticate(user1)
|
|
||||||
|
|
||||||
create_resp = self.client.post(
|
|
||||||
"/api/share_links/",
|
|
||||||
data={
|
|
||||||
"document": doc.pk,
|
|
||||||
"file_version": "original",
|
|
||||||
},
|
|
||||||
format="json",
|
|
||||||
)
|
|
||||||
self.assertEqual(create_resp.status_code, status.HTTP_403_FORBIDDEN)
|
|
||||||
|
|
||||||
assign_perm("view_document", user1, doc)
|
|
||||||
|
|
||||||
create_resp = self.client.post(
|
|
||||||
"/api/share_links/",
|
|
||||||
data={
|
|
||||||
"document": doc.pk,
|
|
||||||
"file_version": "original",
|
|
||||||
},
|
|
||||||
format="json",
|
|
||||||
)
|
|
||||||
self.assertEqual(create_resp.status_code, status.HTTP_201_CREATED)
|
|
||||||
self.assertEqual(create_resp.data["document"], doc.pk)
|
|
||||||
|
|
||||||
def test_next_asn(self) -> None:
|
def test_next_asn(self) -> None:
|
||||||
"""
|
"""
|
||||||
GIVEN:
|
GIVEN:
|
||||||
|
|||||||
@@ -21,16 +21,6 @@ class TestApiUiSettings(DirectoriesMixin, APITestCase):
|
|||||||
self.test_user.save()
|
self.test_user.save()
|
||||||
self.client.force_authenticate(user=self.test_user)
|
self.client.force_authenticate(user=self.test_user)
|
||||||
|
|
||||||
@override_settings(
|
|
||||||
APP_TITLE=None,
|
|
||||||
APP_LOGO=None,
|
|
||||||
AUDIT_LOG_ENABLED=True,
|
|
||||||
EMPTY_TRASH_DELAY=30,
|
|
||||||
ENABLE_UPDATE_CHECK="default",
|
|
||||||
EMAIL_ENABLED=False,
|
|
||||||
GMAIL_OAUTH_ENABLED=False,
|
|
||||||
OUTLOOK_OAUTH_ENABLED=False,
|
|
||||||
)
|
|
||||||
def test_api_get_ui_settings(self) -> None:
|
def test_api_get_ui_settings(self) -> None:
|
||||||
response = self.client.get(self.ENDPOINT, format="json")
|
response = self.client.get(self.ENDPOINT, format="json")
|
||||||
self.assertEqual(response.status_code, status.HTTP_200_OK)
|
self.assertEqual(response.status_code, status.HTTP_200_OK)
|
||||||
|
|||||||
@@ -919,7 +919,6 @@ class TestTagBarcode(DirectoriesMixin, SampleDirMixin, GetReaderPluginMixin, Tes
|
|||||||
@override_settings(
|
@override_settings(
|
||||||
CONSUMER_ENABLE_TAG_BARCODE=True,
|
CONSUMER_ENABLE_TAG_BARCODE=True,
|
||||||
CONSUMER_TAG_BARCODE_MAPPING={"ASN(.*)": "\\g<1>"},
|
CONSUMER_TAG_BARCODE_MAPPING={"ASN(.*)": "\\g<1>"},
|
||||||
CONSUMER_ENABLE_ASN_BARCODE=False,
|
|
||||||
)
|
)
|
||||||
def test_scan_file_for_many_custom_tags(self) -> None:
|
def test_scan_file_for_many_custom_tags(self) -> None:
|
||||||
"""
|
"""
|
||||||
|
|||||||
@@ -329,14 +329,18 @@ class TestFileHandling(DirectoriesMixin, FileSystemAssertsMixin, TestCase):
|
|||||||
FILENAME_FORMAT="{added_year}-{added_month}-{added_day}",
|
FILENAME_FORMAT="{added_year}-{added_month}-{added_day}",
|
||||||
)
|
)
|
||||||
def test_added_year_month_day(self) -> None:
|
def test_added_year_month_day(self) -> None:
|
||||||
d1 = timezone.make_aware(datetime.datetime(1232, 1, 9, 1, 1, 1))
|
d1 = timezone.make_aware(datetime.datetime(232, 1, 9, 1, 1, 1))
|
||||||
doc1 = Document.objects.create(
|
doc1 = Document.objects.create(
|
||||||
title="doc1",
|
title="doc1",
|
||||||
mime_type="application/pdf",
|
mime_type="application/pdf",
|
||||||
added=d1,
|
added=d1,
|
||||||
)
|
)
|
||||||
|
|
||||||
self.assertEqual(generate_filename(doc1), Path("1232-01-09.pdf"))
|
# Account for 3.14 padding changes
|
||||||
|
expected_year: str = d1.strftime("%Y")
|
||||||
|
expected_filename: Path = Path(f"{expected_year}-01-09.pdf")
|
||||||
|
|
||||||
|
self.assertEqual(generate_filename(doc1), expected_filename)
|
||||||
|
|
||||||
doc1.added = timezone.make_aware(datetime.datetime(2020, 11, 16, 1, 1, 1))
|
doc1.added = timezone.make_aware(datetime.datetime(2020, 11, 16, 1, 1, 1))
|
||||||
|
|
||||||
|
|||||||
@@ -21,7 +21,7 @@ class TestDateLocalization:
|
|||||||
14,
|
14,
|
||||||
30,
|
30,
|
||||||
5,
|
5,
|
||||||
tzinfo=datetime.timezone.utc,
|
tzinfo=datetime.UTC,
|
||||||
)
|
)
|
||||||
|
|
||||||
TEST_DATETIME_STRING: str = "2023-10-26T14:30:05+00:00"
|
TEST_DATETIME_STRING: str = "2023-10-26T14:30:05+00:00"
|
||||||
|
|||||||
@@ -4,7 +4,6 @@ from io import StringIO
|
|||||||
from pathlib import Path
|
from pathlib import Path
|
||||||
from unittest import mock
|
from unittest import mock
|
||||||
|
|
||||||
import pytest
|
|
||||||
from auditlog.models import LogEntry
|
from auditlog.models import LogEntry
|
||||||
from django.contrib.contenttypes.models import ContentType
|
from django.contrib.contenttypes.models import ContentType
|
||||||
from django.core.management import call_command
|
from django.core.management import call_command
|
||||||
@@ -20,7 +19,6 @@ from documents.tests.utils import FileSystemAssertsMixin
|
|||||||
sample_file: Path = Path(__file__).parent / "samples" / "simple.pdf"
|
sample_file: Path = Path(__file__).parent / "samples" / "simple.pdf"
|
||||||
|
|
||||||
|
|
||||||
@pytest.mark.management
|
|
||||||
@override_settings(FILENAME_FORMAT="{correspondent}/{title}")
|
@override_settings(FILENAME_FORMAT="{correspondent}/{title}")
|
||||||
class TestArchiver(DirectoriesMixin, FileSystemAssertsMixin, TestCase):
|
class TestArchiver(DirectoriesMixin, FileSystemAssertsMixin, TestCase):
|
||||||
def make_models(self):
|
def make_models(self):
|
||||||
@@ -96,7 +94,6 @@ class TestArchiver(DirectoriesMixin, FileSystemAssertsMixin, TestCase):
|
|||||||
self.assertEqual(doc2.archive_filename, "document_01.pdf")
|
self.assertEqual(doc2.archive_filename, "document_01.pdf")
|
||||||
|
|
||||||
|
|
||||||
@pytest.mark.management
|
|
||||||
class TestMakeIndex(TestCase):
|
class TestMakeIndex(TestCase):
|
||||||
@mock.patch("documents.management.commands.document_index.index_reindex")
|
@mock.patch("documents.management.commands.document_index.index_reindex")
|
||||||
def test_reindex(self, m) -> None:
|
def test_reindex(self, m) -> None:
|
||||||
@@ -109,7 +106,6 @@ class TestMakeIndex(TestCase):
|
|||||||
m.assert_called_once()
|
m.assert_called_once()
|
||||||
|
|
||||||
|
|
||||||
@pytest.mark.management
|
|
||||||
class TestRenamer(DirectoriesMixin, FileSystemAssertsMixin, TestCase):
|
class TestRenamer(DirectoriesMixin, FileSystemAssertsMixin, TestCase):
|
||||||
@override_settings(FILENAME_FORMAT="")
|
@override_settings(FILENAME_FORMAT="")
|
||||||
def test_rename(self) -> None:
|
def test_rename(self) -> None:
|
||||||
@@ -144,7 +140,6 @@ class TestCreateClassifier(TestCase):
|
|||||||
m.assert_called_once()
|
m.assert_called_once()
|
||||||
|
|
||||||
|
|
||||||
@pytest.mark.management
|
|
||||||
class TestSanityChecker(DirectoriesMixin, TestCase):
|
class TestSanityChecker(DirectoriesMixin, TestCase):
|
||||||
def test_no_issues(self) -> None:
|
def test_no_issues(self) -> None:
|
||||||
with self.assertLogs() as capture:
|
with self.assertLogs() as capture:
|
||||||
@@ -170,7 +165,6 @@ class TestSanityChecker(DirectoriesMixin, TestCase):
|
|||||||
self.assertIn("Checksum mismatch. Stored: abc, actual:", capture.output[1])
|
self.assertIn("Checksum mismatch. Stored: abc, actual:", capture.output[1])
|
||||||
|
|
||||||
|
|
||||||
@pytest.mark.management
|
|
||||||
class TestConvertMariaDBUUID(TestCase):
|
class TestConvertMariaDBUUID(TestCase):
|
||||||
@mock.patch("django.db.connection.schema_editor")
|
@mock.patch("django.db.connection.schema_editor")
|
||||||
def test_convert(self, m) -> None:
|
def test_convert(self, m) -> None:
|
||||||
@@ -184,7 +178,6 @@ class TestConvertMariaDBUUID(TestCase):
|
|||||||
self.assertIn("Successfully converted", stdout.getvalue())
|
self.assertIn("Successfully converted", stdout.getvalue())
|
||||||
|
|
||||||
|
|
||||||
@pytest.mark.management
|
|
||||||
class TestPruneAuditLogs(TestCase):
|
class TestPruneAuditLogs(TestCase):
|
||||||
def test_prune_audit_logs(self) -> None:
|
def test_prune_audit_logs(self) -> None:
|
||||||
LogEntry.objects.create(
|
LogEntry.objects.create(
|
||||||
|
|||||||
@@ -577,7 +577,6 @@ class TestTagsFromPath:
|
|||||||
assert len(tag_ids) == 0
|
assert len(tag_ids) == 0
|
||||||
|
|
||||||
|
|
||||||
@pytest.mark.management
|
|
||||||
class TestCommandValidation:
|
class TestCommandValidation:
|
||||||
"""Tests for command argument validation."""
|
"""Tests for command argument validation."""
|
||||||
|
|
||||||
@@ -606,7 +605,6 @@ class TestCommandValidation:
|
|||||||
cmd.handle(directory=str(sample_pdf), oneshot=True, testing=False)
|
cmd.handle(directory=str(sample_pdf), oneshot=True, testing=False)
|
||||||
|
|
||||||
|
|
||||||
@pytest.mark.management
|
|
||||||
@pytest.mark.usefixtures("mock_supported_extensions")
|
@pytest.mark.usefixtures("mock_supported_extensions")
|
||||||
class TestCommandOneshot:
|
class TestCommandOneshot:
|
||||||
"""Tests for oneshot mode."""
|
"""Tests for oneshot mode."""
|
||||||
@@ -777,7 +775,6 @@ def start_consumer(
|
|||||||
)
|
)
|
||||||
|
|
||||||
|
|
||||||
@pytest.mark.management
|
|
||||||
@pytest.mark.django_db
|
@pytest.mark.django_db
|
||||||
class TestCommandWatch:
|
class TestCommandWatch:
|
||||||
"""Integration tests for the watch loop."""
|
"""Integration tests for the watch loop."""
|
||||||
@@ -899,7 +896,6 @@ class TestCommandWatch:
|
|||||||
assert not thread.is_alive()
|
assert not thread.is_alive()
|
||||||
|
|
||||||
|
|
||||||
@pytest.mark.management
|
|
||||||
@pytest.mark.django_db
|
@pytest.mark.django_db
|
||||||
class TestCommandWatchPolling:
|
class TestCommandWatchPolling:
|
||||||
"""Tests for polling mode."""
|
"""Tests for polling mode."""
|
||||||
@@ -932,7 +928,6 @@ class TestCommandWatchPolling:
|
|||||||
mock_consume_file_delay.delay.assert_called()
|
mock_consume_file_delay.delay.assert_called()
|
||||||
|
|
||||||
|
|
||||||
@pytest.mark.management
|
|
||||||
@pytest.mark.django_db
|
@pytest.mark.django_db
|
||||||
class TestCommandWatchRecursive:
|
class TestCommandWatchRecursive:
|
||||||
"""Tests for recursive watching."""
|
"""Tests for recursive watching."""
|
||||||
@@ -996,7 +991,6 @@ class TestCommandWatchRecursive:
|
|||||||
assert len(overrides.tag_ids) == 2
|
assert len(overrides.tag_ids) == 2
|
||||||
|
|
||||||
|
|
||||||
@pytest.mark.management
|
|
||||||
@pytest.mark.django_db
|
@pytest.mark.django_db
|
||||||
class TestCommandWatchEdgeCases:
|
class TestCommandWatchEdgeCases:
|
||||||
"""Tests for edge cases and error handling."""
|
"""Tests for edge cases and error handling."""
|
||||||
|
|||||||
@@ -7,7 +7,6 @@ from pathlib import Path
|
|||||||
from unittest import mock
|
from unittest import mock
|
||||||
from zipfile import ZipFile
|
from zipfile import ZipFile
|
||||||
|
|
||||||
import pytest
|
|
||||||
from allauth.socialaccount.models import SocialAccount
|
from allauth.socialaccount.models import SocialAccount
|
||||||
from allauth.socialaccount.models import SocialApp
|
from allauth.socialaccount.models import SocialApp
|
||||||
from allauth.socialaccount.models import SocialToken
|
from allauth.socialaccount.models import SocialToken
|
||||||
@@ -46,7 +45,6 @@ from documents.tests.utils import paperless_environment
|
|||||||
from paperless_mail.models import MailAccount
|
from paperless_mail.models import MailAccount
|
||||||
|
|
||||||
|
|
||||||
@pytest.mark.management
|
|
||||||
class TestExportImport(
|
class TestExportImport(
|
||||||
DirectoriesMixin,
|
DirectoriesMixin,
|
||||||
FileSystemAssertsMixin,
|
FileSystemAssertsMixin,
|
||||||
@@ -848,7 +846,6 @@ class TestExportImport(
|
|||||||
self.assertEqual(Document.objects.all().count(), 4)
|
self.assertEqual(Document.objects.all().count(), 4)
|
||||||
|
|
||||||
|
|
||||||
@pytest.mark.management
|
|
||||||
class TestCryptExportImport(
|
class TestCryptExportImport(
|
||||||
DirectoriesMixin,
|
DirectoriesMixin,
|
||||||
FileSystemAssertsMixin,
|
FileSystemAssertsMixin,
|
||||||
|
|||||||
@@ -1,6 +1,5 @@
|
|||||||
from io import StringIO
|
from io import StringIO
|
||||||
|
|
||||||
import pytest
|
|
||||||
from django.core.management import CommandError
|
from django.core.management import CommandError
|
||||||
from django.core.management import call_command
|
from django.core.management import call_command
|
||||||
from django.test import TestCase
|
from django.test import TestCase
|
||||||
@@ -8,7 +7,6 @@ from django.test import TestCase
|
|||||||
from documents.models import Document
|
from documents.models import Document
|
||||||
|
|
||||||
|
|
||||||
@pytest.mark.management
|
|
||||||
class TestFuzzyMatchCommand(TestCase):
|
class TestFuzzyMatchCommand(TestCase):
|
||||||
MSG_REGEX = r"Document \d fuzzy match to \d \(confidence \d\d\.\d\d\d\)"
|
MSG_REGEX = r"Document \d fuzzy match to \d \(confidence \d\d\.\d\d\d\)"
|
||||||
|
|
||||||
@@ -51,6 +49,19 @@ class TestFuzzyMatchCommand(TestCase):
|
|||||||
self.call_command("--ratio", "101")
|
self.call_command("--ratio", "101")
|
||||||
self.assertIn("The ratio must be between 0 and 100", str(e.exception))
|
self.assertIn("The ratio must be between 0 and 100", str(e.exception))
|
||||||
|
|
||||||
|
def test_invalid_process_count(self) -> None:
|
||||||
|
"""
|
||||||
|
GIVEN:
|
||||||
|
- Invalid process count less than 0 above upper
|
||||||
|
WHEN:
|
||||||
|
- Command is called
|
||||||
|
THEN:
|
||||||
|
- Error is raised indicating issue
|
||||||
|
"""
|
||||||
|
with self.assertRaises(CommandError) as e:
|
||||||
|
self.call_command("--processes", "0")
|
||||||
|
self.assertIn("There must be at least 1 process", str(e.exception))
|
||||||
|
|
||||||
def test_no_matches(self) -> None:
|
def test_no_matches(self) -> None:
|
||||||
"""
|
"""
|
||||||
GIVEN:
|
GIVEN:
|
||||||
@@ -140,7 +151,7 @@ class TestFuzzyMatchCommand(TestCase):
|
|||||||
mime_type="application/pdf",
|
mime_type="application/pdf",
|
||||||
filename="final_test.pdf",
|
filename="final_test.pdf",
|
||||||
)
|
)
|
||||||
stdout, _ = self.call_command("--no-progress-bar", "--processes", "1")
|
stdout, _ = self.call_command()
|
||||||
lines = [x.strip() for x in stdout.splitlines() if x.strip()]
|
lines = [x.strip() for x in stdout.splitlines() if x.strip()]
|
||||||
self.assertEqual(len(lines), 3)
|
self.assertEqual(len(lines), 3)
|
||||||
for line in lines:
|
for line in lines:
|
||||||
@@ -183,12 +194,7 @@ class TestFuzzyMatchCommand(TestCase):
|
|||||||
|
|
||||||
self.assertEqual(Document.objects.count(), 3)
|
self.assertEqual(Document.objects.count(), 3)
|
||||||
|
|
||||||
stdout, _ = self.call_command(
|
stdout, _ = self.call_command("--delete")
|
||||||
"--delete",
|
|
||||||
"--no-progress-bar",
|
|
||||||
"--processes",
|
|
||||||
"1",
|
|
||||||
)
|
|
||||||
|
|
||||||
self.assertIn(
|
self.assertIn(
|
||||||
"The command is configured to delete documents. Use with caution",
|
"The command is configured to delete documents. Use with caution",
|
||||||
|
|||||||
@@ -4,7 +4,6 @@ from io import StringIO
|
|||||||
from pathlib import Path
|
from pathlib import Path
|
||||||
from zipfile import ZipFile
|
from zipfile import ZipFile
|
||||||
|
|
||||||
import pytest
|
|
||||||
from django.contrib.auth.models import User
|
from django.contrib.auth.models import User
|
||||||
from django.core.management import call_command
|
from django.core.management import call_command
|
||||||
from django.core.management.base import CommandError
|
from django.core.management.base import CommandError
|
||||||
@@ -19,7 +18,6 @@ from documents.tests.utils import FileSystemAssertsMixin
|
|||||||
from documents.tests.utils import SampleDirMixin
|
from documents.tests.utils import SampleDirMixin
|
||||||
|
|
||||||
|
|
||||||
@pytest.mark.management
|
|
||||||
class TestCommandImport(
|
class TestCommandImport(
|
||||||
DirectoriesMixin,
|
DirectoriesMixin,
|
||||||
FileSystemAssertsMixin,
|
FileSystemAssertsMixin,
|
||||||
|
|||||||
@@ -1,4 +1,3 @@
|
|||||||
import pytest
|
|
||||||
from django.core.management import call_command
|
from django.core.management import call_command
|
||||||
from django.core.management.base import CommandError
|
from django.core.management.base import CommandError
|
||||||
from django.test import TestCase
|
from django.test import TestCase
|
||||||
@@ -11,7 +10,6 @@ from documents.models import Tag
|
|||||||
from documents.tests.utils import DirectoriesMixin
|
from documents.tests.utils import DirectoriesMixin
|
||||||
|
|
||||||
|
|
||||||
@pytest.mark.management
|
|
||||||
class TestRetagger(DirectoriesMixin, TestCase):
|
class TestRetagger(DirectoriesMixin, TestCase):
|
||||||
def make_models(self) -> None:
|
def make_models(self) -> None:
|
||||||
self.sp1 = StoragePath.objects.create(
|
self.sp1 = StoragePath.objects.create(
|
||||||
|
|||||||
@@ -2,7 +2,6 @@ import os
|
|||||||
from io import StringIO
|
from io import StringIO
|
||||||
from unittest import mock
|
from unittest import mock
|
||||||
|
|
||||||
import pytest
|
|
||||||
from django.contrib.auth.models import User
|
from django.contrib.auth.models import User
|
||||||
from django.core.management import call_command
|
from django.core.management import call_command
|
||||||
from django.test import TestCase
|
from django.test import TestCase
|
||||||
@@ -10,7 +9,6 @@ from django.test import TestCase
|
|||||||
from documents.tests.utils import DirectoriesMixin
|
from documents.tests.utils import DirectoriesMixin
|
||||||
|
|
||||||
|
|
||||||
@pytest.mark.management
|
|
||||||
class TestManageSuperUser(DirectoriesMixin, TestCase):
|
class TestManageSuperUser(DirectoriesMixin, TestCase):
|
||||||
def call_command(self, environ):
|
def call_command(self, environ):
|
||||||
out = StringIO()
|
out = StringIO()
|
||||||
|
|||||||
@@ -2,7 +2,6 @@ import shutil
|
|||||||
from pathlib import Path
|
from pathlib import Path
|
||||||
from unittest import mock
|
from unittest import mock
|
||||||
|
|
||||||
import pytest
|
|
||||||
from django.core.management import call_command
|
from django.core.management import call_command
|
||||||
from django.test import TestCase
|
from django.test import TestCase
|
||||||
|
|
||||||
@@ -13,7 +12,6 @@ from documents.tests.utils import DirectoriesMixin
|
|||||||
from documents.tests.utils import FileSystemAssertsMixin
|
from documents.tests.utils import FileSystemAssertsMixin
|
||||||
|
|
||||||
|
|
||||||
@pytest.mark.management
|
|
||||||
class TestMakeThumbnails(DirectoriesMixin, FileSystemAssertsMixin, TestCase):
|
class TestMakeThumbnails(DirectoriesMixin, FileSystemAssertsMixin, TestCase):
|
||||||
def make_models(self) -> None:
|
def make_models(self) -> None:
|
||||||
self.d1 = Document.objects.create(
|
self.d1 = Document.objects.create(
|
||||||
|
|||||||
@@ -1742,48 +1742,6 @@ class TestWorkflows(
|
|||||||
|
|
||||||
self.assertEqual(doc.title, "Doc {created_year]")
|
self.assertEqual(doc.title, "Doc {created_year]")
|
||||||
|
|
||||||
def test_document_updated_workflow_ignores_version_documents(self) -> None:
|
|
||||||
trigger = WorkflowTrigger.objects.create(
|
|
||||||
type=WorkflowTrigger.WorkflowTriggerType.DOCUMENT_UPDATED,
|
|
||||||
)
|
|
||||||
action = WorkflowAction.objects.create(
|
|
||||||
assign_title="Doc assign owner",
|
|
||||||
assign_owner=self.user2,
|
|
||||||
)
|
|
||||||
workflow = Workflow.objects.create(
|
|
||||||
name="Workflow 1",
|
|
||||||
order=0,
|
|
||||||
)
|
|
||||||
workflow.triggers.add(trigger)
|
|
||||||
workflow.actions.add(action)
|
|
||||||
|
|
||||||
root_doc = Document.objects.create(
|
|
||||||
title="root",
|
|
||||||
correspondent=self.c,
|
|
||||||
original_filename="root.pdf",
|
|
||||||
)
|
|
||||||
version_doc = Document.objects.create(
|
|
||||||
title="version",
|
|
||||||
correspondent=self.c,
|
|
||||||
original_filename="version.pdf",
|
|
||||||
root_document=root_doc,
|
|
||||||
)
|
|
||||||
|
|
||||||
run_workflows(WorkflowTrigger.WorkflowTriggerType.DOCUMENT_UPDATED, version_doc)
|
|
||||||
|
|
||||||
root_doc.refresh_from_db()
|
|
||||||
version_doc.refresh_from_db()
|
|
||||||
|
|
||||||
self.assertIsNone(root_doc.owner)
|
|
||||||
self.assertIsNone(version_doc.owner)
|
|
||||||
self.assertFalse(
|
|
||||||
WorkflowRun.objects.filter(
|
|
||||||
workflow=workflow,
|
|
||||||
type=WorkflowTrigger.WorkflowTriggerType.DOCUMENT_UPDATED,
|
|
||||||
document=version_doc,
|
|
||||||
).exists(),
|
|
||||||
)
|
|
||||||
|
|
||||||
def test_document_updated_workflow(self) -> None:
|
def test_document_updated_workflow(self) -> None:
|
||||||
trigger = WorkflowTrigger.objects.create(
|
trigger = WorkflowTrigger.objects.create(
|
||||||
type=WorkflowTrigger.WorkflowTriggerType.DOCUMENT_UPDATED,
|
type=WorkflowTrigger.WorkflowTriggerType.DOCUMENT_UPDATED,
|
||||||
@@ -2052,60 +2010,6 @@ class TestWorkflows(
|
|||||||
doc.refresh_from_db()
|
doc.refresh_from_db()
|
||||||
self.assertEqual(doc.owner, self.user2)
|
self.assertEqual(doc.owner, self.user2)
|
||||||
|
|
||||||
def test_workflow_scheduled_trigger_ignores_version_documents(self) -> None:
|
|
||||||
trigger = WorkflowTrigger.objects.create(
|
|
||||||
type=WorkflowTrigger.WorkflowTriggerType.SCHEDULED,
|
|
||||||
schedule_offset_days=1,
|
|
||||||
schedule_date_field=WorkflowTrigger.ScheduleDateField.ADDED,
|
|
||||||
)
|
|
||||||
action = WorkflowAction.objects.create(
|
|
||||||
assign_title="Doc assign owner",
|
|
||||||
assign_owner=self.user2,
|
|
||||||
)
|
|
||||||
workflow = Workflow.objects.create(
|
|
||||||
name="Workflow 1",
|
|
||||||
order=0,
|
|
||||||
)
|
|
||||||
workflow.triggers.add(trigger)
|
|
||||||
workflow.actions.add(action)
|
|
||||||
|
|
||||||
root_doc = Document.objects.create(
|
|
||||||
title="root",
|
|
||||||
correspondent=self.c,
|
|
||||||
original_filename="root.pdf",
|
|
||||||
added=timezone.now() - timedelta(days=10),
|
|
||||||
)
|
|
||||||
version_doc = Document.objects.create(
|
|
||||||
title="version",
|
|
||||||
correspondent=self.c,
|
|
||||||
original_filename="version.pdf",
|
|
||||||
root_document=root_doc,
|
|
||||||
added=timezone.now() - timedelta(days=10),
|
|
||||||
)
|
|
||||||
|
|
||||||
tasks.check_scheduled_workflows()
|
|
||||||
|
|
||||||
root_doc.refresh_from_db()
|
|
||||||
version_doc.refresh_from_db()
|
|
||||||
|
|
||||||
self.assertEqual(root_doc.owner, self.user2)
|
|
||||||
self.assertIsNone(version_doc.owner)
|
|
||||||
self.assertEqual(
|
|
||||||
WorkflowRun.objects.filter(
|
|
||||||
workflow=workflow,
|
|
||||||
type=WorkflowTrigger.WorkflowTriggerType.SCHEDULED,
|
|
||||||
document=root_doc,
|
|
||||||
).count(),
|
|
||||||
1,
|
|
||||||
)
|
|
||||||
self.assertFalse(
|
|
||||||
WorkflowRun.objects.filter(
|
|
||||||
workflow=workflow,
|
|
||||||
type=WorkflowTrigger.WorkflowTriggerType.SCHEDULED,
|
|
||||||
document=version_doc,
|
|
||||||
).exists(),
|
|
||||||
)
|
|
||||||
|
|
||||||
@mock.patch("documents.models.Document.objects.filter", autospec=True)
|
@mock.patch("documents.models.Document.objects.filter", autospec=True)
|
||||||
def test_workflow_scheduled_trigger_modified(self, mock_filter) -> None:
|
def test_workflow_scheduled_trigger_modified(self, mock_filter) -> None:
|
||||||
"""
|
"""
|
||||||
@@ -4666,7 +4570,7 @@ class TestDateWorkflowLocalization(
|
|||||||
14,
|
14,
|
||||||
30,
|
30,
|
||||||
5,
|
5,
|
||||||
tzinfo=datetime.timezone.utc,
|
tzinfo=datetime.UTC,
|
||||||
)
|
)
|
||||||
|
|
||||||
@pytest.mark.parametrize(
|
@pytest.mark.parametrize(
|
||||||
|
|||||||
@@ -33,11 +33,11 @@ from documents.plugins.helpers import ProgressStatusOptions
|
|||||||
def setup_directories():
|
def setup_directories():
|
||||||
dirs = namedtuple("Dirs", ())
|
dirs = namedtuple("Dirs", ())
|
||||||
|
|
||||||
dirs.data_dir = Path(tempfile.mkdtemp()).resolve()
|
dirs.data_dir = Path(tempfile.mkdtemp())
|
||||||
dirs.scratch_dir = Path(tempfile.mkdtemp()).resolve()
|
dirs.scratch_dir = Path(tempfile.mkdtemp())
|
||||||
dirs.media_dir = Path(tempfile.mkdtemp()).resolve()
|
dirs.media_dir = Path(tempfile.mkdtemp())
|
||||||
dirs.consumption_dir = Path(tempfile.mkdtemp()).resolve()
|
dirs.consumption_dir = Path(tempfile.mkdtemp())
|
||||||
dirs.static_dir = Path(tempfile.mkdtemp()).resolve()
|
dirs.static_dir = Path(tempfile.mkdtemp())
|
||||||
dirs.index_dir = dirs.data_dir / "index"
|
dirs.index_dir = dirs.data_dir / "index"
|
||||||
dirs.originals_dir = dirs.media_dir / "documents" / "originals"
|
dirs.originals_dir = dirs.media_dir / "documents" / "originals"
|
||||||
dirs.thumbnail_dir = dirs.media_dir / "documents" / "thumbnails"
|
dirs.thumbnail_dir = dirs.media_dir / "documents" / "thumbnails"
|
||||||
|
|||||||
@@ -1,7 +1,7 @@
|
|||||||
from __future__ import annotations
|
from __future__ import annotations
|
||||||
|
|
||||||
from dataclasses import dataclass
|
from dataclasses import dataclass
|
||||||
from enum import Enum
|
from enum import StrEnum
|
||||||
from typing import TYPE_CHECKING
|
from typing import TYPE_CHECKING
|
||||||
from typing import Any
|
from typing import Any
|
||||||
|
|
||||||
@@ -11,7 +11,7 @@ if TYPE_CHECKING:
|
|||||||
from django.http import HttpRequest
|
from django.http import HttpRequest
|
||||||
|
|
||||||
|
|
||||||
class VersionResolutionError(str, Enum):
|
class VersionResolutionError(StrEnum):
|
||||||
INVALID = "invalid"
|
INVALID = "invalid"
|
||||||
NOT_FOUND = "not_found"
|
NOT_FOUND = "not_found"
|
||||||
|
|
||||||
|
|||||||
@@ -1758,11 +1758,6 @@ class DocumentViewSet(
|
|||||||
.order_by("-id")
|
.order_by("-id")
|
||||||
.first()
|
.first()
|
||||||
)
|
)
|
||||||
|
|
||||||
document_updated.send(
|
|
||||||
sender=self.__class__,
|
|
||||||
document=root_doc,
|
|
||||||
)
|
|
||||||
return Response(
|
return Response(
|
||||||
{
|
{
|
||||||
"result": "OK",
|
"result": "OK",
|
||||||
@@ -1832,11 +1827,6 @@ class DocumentViewSet(
|
|||||||
},
|
},
|
||||||
)
|
)
|
||||||
|
|
||||||
document_updated.send(
|
|
||||||
sender=self.__class__,
|
|
||||||
document=root_doc,
|
|
||||||
)
|
|
||||||
|
|
||||||
return Response(
|
return Response(
|
||||||
{
|
{
|
||||||
"id": version_doc.id,
|
"id": version_doc.id,
|
||||||
@@ -2444,13 +2434,6 @@ class SelectionDataView(GenericAPIView):
|
|||||||
serializer.is_valid(raise_exception=True)
|
serializer.is_valid(raise_exception=True)
|
||||||
|
|
||||||
ids = serializer.validated_data.get("documents")
|
ids = serializer.validated_data.get("documents")
|
||||||
permitted_documents = get_objects_for_user_owner_aware(
|
|
||||||
request.user,
|
|
||||||
"documents.view_document",
|
|
||||||
Document,
|
|
||||||
)
|
|
||||||
if permitted_documents.filter(pk__in=ids).count() != len(ids):
|
|
||||||
return HttpResponseForbidden("Insufficient permissions")
|
|
||||||
|
|
||||||
correspondents = Correspondent.objects.annotate(
|
correspondents = Correspondent.objects.annotate(
|
||||||
document_count=Count(
|
document_count=Count(
|
||||||
|
|||||||
@@ -2,7 +2,7 @@ msgid ""
|
|||||||
msgstr ""
|
msgstr ""
|
||||||
"Project-Id-Version: paperless-ngx\n"
|
"Project-Id-Version: paperless-ngx\n"
|
||||||
"Report-Msgid-Bugs-To: \n"
|
"Report-Msgid-Bugs-To: \n"
|
||||||
"POT-Creation-Date: 2026-03-02 16:22+0000\n"
|
"POT-Creation-Date: 2026-02-26 18:09+0000\n"
|
||||||
"PO-Revision-Date: 2022-02-17 04:17\n"
|
"PO-Revision-Date: 2022-02-17 04:17\n"
|
||||||
"Last-Translator: \n"
|
"Last-Translator: \n"
|
||||||
"Language-Team: English\n"
|
"Language-Team: English\n"
|
||||||
@@ -1299,47 +1299,47 @@ msgstr ""
|
|||||||
msgid "workflow runs"
|
msgid "workflow runs"
|
||||||
msgstr ""
|
msgstr ""
|
||||||
|
|
||||||
#: documents/serialisers.py:463 documents/serialisers.py:2333
|
#: documents/serialisers.py:462
|
||||||
msgid "Insufficient permissions."
|
msgid "Insufficient permissions."
|
||||||
msgstr ""
|
msgstr ""
|
||||||
|
|
||||||
#: documents/serialisers.py:651
|
#: documents/serialisers.py:650
|
||||||
msgid "Invalid color."
|
msgid "Invalid color."
|
||||||
msgstr ""
|
msgstr ""
|
||||||
|
|
||||||
#: documents/serialisers.py:1956
|
#: documents/serialisers.py:1955
|
||||||
#, python-format
|
#, python-format
|
||||||
msgid "File type %(type)s not supported"
|
msgid "File type %(type)s not supported"
|
||||||
msgstr ""
|
msgstr ""
|
||||||
|
|
||||||
#: documents/serialisers.py:2000
|
#: documents/serialisers.py:1999
|
||||||
#, python-format
|
#, python-format
|
||||||
msgid "Custom field id must be an integer: %(id)s"
|
msgid "Custom field id must be an integer: %(id)s"
|
||||||
msgstr ""
|
msgstr ""
|
||||||
|
|
||||||
#: documents/serialisers.py:2007
|
#: documents/serialisers.py:2006
|
||||||
#, python-format
|
#, python-format
|
||||||
msgid "Custom field with id %(id)s does not exist"
|
msgid "Custom field with id %(id)s does not exist"
|
||||||
msgstr ""
|
msgstr ""
|
||||||
|
|
||||||
#: documents/serialisers.py:2024 documents/serialisers.py:2034
|
#: documents/serialisers.py:2023 documents/serialisers.py:2033
|
||||||
msgid ""
|
msgid ""
|
||||||
"Custom fields must be a list of integers or an object mapping ids to values."
|
"Custom fields must be a list of integers or an object mapping ids to values."
|
||||||
msgstr ""
|
msgstr ""
|
||||||
|
|
||||||
#: documents/serialisers.py:2029
|
#: documents/serialisers.py:2028
|
||||||
msgid "Some custom fields don't exist or were specified twice."
|
msgid "Some custom fields don't exist or were specified twice."
|
||||||
msgstr ""
|
msgstr ""
|
||||||
|
|
||||||
#: documents/serialisers.py:2176
|
#: documents/serialisers.py:2175
|
||||||
msgid "Invalid variable detected."
|
msgid "Invalid variable detected."
|
||||||
msgstr ""
|
msgstr ""
|
||||||
|
|
||||||
#: documents/serialisers.py:2389
|
#: documents/serialisers.py:2377
|
||||||
msgid "Duplicate document identifiers are not allowed."
|
msgid "Duplicate document identifiers are not allowed."
|
||||||
msgstr ""
|
msgstr ""
|
||||||
|
|
||||||
#: documents/serialisers.py:2419 documents/views.py:3327
|
#: documents/serialisers.py:2407 documents/views.py:3310
|
||||||
#, python-format
|
#, python-format
|
||||||
msgid "Documents not found: %(ids)s"
|
msgid "Documents not found: %(ids)s"
|
||||||
msgstr ""
|
msgstr ""
|
||||||
@@ -1603,20 +1603,20 @@ msgstr ""
|
|||||||
msgid "Unable to parse URI {value}"
|
msgid "Unable to parse URI {value}"
|
||||||
msgstr ""
|
msgstr ""
|
||||||
|
|
||||||
#: documents/views.py:3339
|
#: documents/views.py:3322
|
||||||
#, python-format
|
#, python-format
|
||||||
msgid "Insufficient permissions to share document %(id)s."
|
msgid "Insufficient permissions to share document %(id)s."
|
||||||
msgstr ""
|
msgstr ""
|
||||||
|
|
||||||
#: documents/views.py:3382
|
#: documents/views.py:3365
|
||||||
msgid "Bundle is already being processed."
|
msgid "Bundle is already being processed."
|
||||||
msgstr ""
|
msgstr ""
|
||||||
|
|
||||||
#: documents/views.py:3439
|
#: documents/views.py:3422
|
||||||
msgid "The share link bundle is still being prepared. Please try again later."
|
msgid "The share link bundle is still being prepared. Please try again later."
|
||||||
msgstr ""
|
msgstr ""
|
||||||
|
|
||||||
#: documents/views.py:3449
|
#: documents/views.py:3432
|
||||||
msgid "The share link bundle is unavailable."
|
msgid "The share link bundle is unavailable."
|
||||||
msgstr ""
|
msgstr ""
|
||||||
|
|
||||||
@@ -1856,151 +1856,151 @@ msgstr ""
|
|||||||
msgid "paperless application settings"
|
msgid "paperless application settings"
|
||||||
msgstr ""
|
msgstr ""
|
||||||
|
|
||||||
#: paperless/settings/__init__.py:746
|
#: paperless/settings.py:819
|
||||||
msgid "English (US)"
|
msgid "English (US)"
|
||||||
msgstr ""
|
msgstr ""
|
||||||
|
|
||||||
#: paperless/settings/__init__.py:747
|
#: paperless/settings.py:820
|
||||||
msgid "Arabic"
|
msgid "Arabic"
|
||||||
msgstr ""
|
msgstr ""
|
||||||
|
|
||||||
#: paperless/settings/__init__.py:748
|
#: paperless/settings.py:821
|
||||||
msgid "Afrikaans"
|
msgid "Afrikaans"
|
||||||
msgstr ""
|
msgstr ""
|
||||||
|
|
||||||
#: paperless/settings/__init__.py:749
|
#: paperless/settings.py:822
|
||||||
msgid "Belarusian"
|
msgid "Belarusian"
|
||||||
msgstr ""
|
msgstr ""
|
||||||
|
|
||||||
#: paperless/settings/__init__.py:750
|
#: paperless/settings.py:823
|
||||||
msgid "Bulgarian"
|
msgid "Bulgarian"
|
||||||
msgstr ""
|
msgstr ""
|
||||||
|
|
||||||
#: paperless/settings/__init__.py:751
|
#: paperless/settings.py:824
|
||||||
msgid "Catalan"
|
msgid "Catalan"
|
||||||
msgstr ""
|
msgstr ""
|
||||||
|
|
||||||
#: paperless/settings/__init__.py:752
|
#: paperless/settings.py:825
|
||||||
msgid "Czech"
|
msgid "Czech"
|
||||||
msgstr ""
|
msgstr ""
|
||||||
|
|
||||||
#: paperless/settings/__init__.py:753
|
#: paperless/settings.py:826
|
||||||
msgid "Danish"
|
msgid "Danish"
|
||||||
msgstr ""
|
msgstr ""
|
||||||
|
|
||||||
#: paperless/settings/__init__.py:754
|
#: paperless/settings.py:827
|
||||||
msgid "German"
|
msgid "German"
|
||||||
msgstr ""
|
msgstr ""
|
||||||
|
|
||||||
#: paperless/settings/__init__.py:755
|
#: paperless/settings.py:828
|
||||||
msgid "Greek"
|
msgid "Greek"
|
||||||
msgstr ""
|
msgstr ""
|
||||||
|
|
||||||
#: paperless/settings/__init__.py:756
|
#: paperless/settings.py:829
|
||||||
msgid "English (GB)"
|
msgid "English (GB)"
|
||||||
msgstr ""
|
msgstr ""
|
||||||
|
|
||||||
#: paperless/settings/__init__.py:757
|
#: paperless/settings.py:830
|
||||||
msgid "Spanish"
|
msgid "Spanish"
|
||||||
msgstr ""
|
msgstr ""
|
||||||
|
|
||||||
#: paperless/settings/__init__.py:758
|
#: paperless/settings.py:831
|
||||||
msgid "Persian"
|
msgid "Persian"
|
||||||
msgstr ""
|
msgstr ""
|
||||||
|
|
||||||
#: paperless/settings/__init__.py:759
|
#: paperless/settings.py:832
|
||||||
msgid "Finnish"
|
msgid "Finnish"
|
||||||
msgstr ""
|
msgstr ""
|
||||||
|
|
||||||
#: paperless/settings/__init__.py:760
|
#: paperless/settings.py:833
|
||||||
msgid "French"
|
msgid "French"
|
||||||
msgstr ""
|
msgstr ""
|
||||||
|
|
||||||
#: paperless/settings/__init__.py:761
|
#: paperless/settings.py:834
|
||||||
msgid "Hungarian"
|
msgid "Hungarian"
|
||||||
msgstr ""
|
msgstr ""
|
||||||
|
|
||||||
#: paperless/settings/__init__.py:762
|
#: paperless/settings.py:835
|
||||||
msgid "Indonesian"
|
msgid "Indonesian"
|
||||||
msgstr ""
|
msgstr ""
|
||||||
|
|
||||||
#: paperless/settings/__init__.py:763
|
#: paperless/settings.py:836
|
||||||
msgid "Italian"
|
msgid "Italian"
|
||||||
msgstr ""
|
msgstr ""
|
||||||
|
|
||||||
#: paperless/settings/__init__.py:764
|
#: paperless/settings.py:837
|
||||||
msgid "Japanese"
|
msgid "Japanese"
|
||||||
msgstr ""
|
msgstr ""
|
||||||
|
|
||||||
#: paperless/settings/__init__.py:765
|
#: paperless/settings.py:838
|
||||||
msgid "Korean"
|
msgid "Korean"
|
||||||
msgstr ""
|
msgstr ""
|
||||||
|
|
||||||
#: paperless/settings/__init__.py:766
|
#: paperless/settings.py:839
|
||||||
msgid "Luxembourgish"
|
msgid "Luxembourgish"
|
||||||
msgstr ""
|
msgstr ""
|
||||||
|
|
||||||
#: paperless/settings/__init__.py:767
|
#: paperless/settings.py:840
|
||||||
msgid "Norwegian"
|
msgid "Norwegian"
|
||||||
msgstr ""
|
msgstr ""
|
||||||
|
|
||||||
#: paperless/settings/__init__.py:768
|
#: paperless/settings.py:841
|
||||||
msgid "Dutch"
|
msgid "Dutch"
|
||||||
msgstr ""
|
msgstr ""
|
||||||
|
|
||||||
#: paperless/settings/__init__.py:769
|
#: paperless/settings.py:842
|
||||||
msgid "Polish"
|
msgid "Polish"
|
||||||
msgstr ""
|
msgstr ""
|
||||||
|
|
||||||
#: paperless/settings/__init__.py:770
|
#: paperless/settings.py:843
|
||||||
msgid "Portuguese (Brazil)"
|
msgid "Portuguese (Brazil)"
|
||||||
msgstr ""
|
msgstr ""
|
||||||
|
|
||||||
#: paperless/settings/__init__.py:771
|
#: paperless/settings.py:844
|
||||||
msgid "Portuguese"
|
msgid "Portuguese"
|
||||||
msgstr ""
|
msgstr ""
|
||||||
|
|
||||||
#: paperless/settings/__init__.py:772
|
#: paperless/settings.py:845
|
||||||
msgid "Romanian"
|
msgid "Romanian"
|
||||||
msgstr ""
|
msgstr ""
|
||||||
|
|
||||||
#: paperless/settings/__init__.py:773
|
#: paperless/settings.py:846
|
||||||
msgid "Russian"
|
msgid "Russian"
|
||||||
msgstr ""
|
msgstr ""
|
||||||
|
|
||||||
#: paperless/settings/__init__.py:774
|
#: paperless/settings.py:847
|
||||||
msgid "Slovak"
|
msgid "Slovak"
|
||||||
msgstr ""
|
msgstr ""
|
||||||
|
|
||||||
#: paperless/settings/__init__.py:775
|
#: paperless/settings.py:848
|
||||||
msgid "Slovenian"
|
msgid "Slovenian"
|
||||||
msgstr ""
|
msgstr ""
|
||||||
|
|
||||||
#: paperless/settings/__init__.py:776
|
#: paperless/settings.py:849
|
||||||
msgid "Serbian"
|
msgid "Serbian"
|
||||||
msgstr ""
|
msgstr ""
|
||||||
|
|
||||||
#: paperless/settings/__init__.py:777
|
#: paperless/settings.py:850
|
||||||
msgid "Swedish"
|
msgid "Swedish"
|
||||||
msgstr ""
|
msgstr ""
|
||||||
|
|
||||||
#: paperless/settings/__init__.py:778
|
#: paperless/settings.py:851
|
||||||
msgid "Turkish"
|
msgid "Turkish"
|
||||||
msgstr ""
|
msgstr ""
|
||||||
|
|
||||||
#: paperless/settings/__init__.py:779
|
#: paperless/settings.py:852
|
||||||
msgid "Ukrainian"
|
msgid "Ukrainian"
|
||||||
msgstr ""
|
msgstr ""
|
||||||
|
|
||||||
#: paperless/settings/__init__.py:780
|
#: paperless/settings.py:853
|
||||||
msgid "Vietnamese"
|
msgid "Vietnamese"
|
||||||
msgstr ""
|
msgstr ""
|
||||||
|
|
||||||
#: paperless/settings/__init__.py:781
|
#: paperless/settings.py:854
|
||||||
msgid "Chinese Simplified"
|
msgid "Chinese Simplified"
|
||||||
msgstr ""
|
msgstr ""
|
||||||
|
|
||||||
#: paperless/settings/__init__.py:782
|
#: paperless/settings.py:855
|
||||||
msgid "Chinese Traditional"
|
msgid "Chinese Traditional"
|
||||||
msgstr ""
|
msgstr ""
|
||||||
|
|
||||||
|
|||||||
@@ -202,43 +202,3 @@ def audit_log_check(app_configs, **kwargs):
|
|||||||
)
|
)
|
||||||
|
|
||||||
return result
|
return result
|
||||||
|
|
||||||
|
|
||||||
@register()
|
|
||||||
def check_deprecated_db_settings(
|
|
||||||
app_configs: object,
|
|
||||||
**kwargs: object,
|
|
||||||
) -> list[Warning]:
|
|
||||||
"""Check for deprecated database environment variables.
|
|
||||||
|
|
||||||
Detects legacy advanced options that should be migrated to
|
|
||||||
PAPERLESS_DB_OPTIONS. Returns one Warning per deprecated variable found.
|
|
||||||
"""
|
|
||||||
deprecated_vars: dict[str, str] = {
|
|
||||||
"PAPERLESS_DB_TIMEOUT": "timeout",
|
|
||||||
"PAPERLESS_DB_POOLSIZE": "pool.min_size / pool.max_size",
|
|
||||||
"PAPERLESS_DBSSLMODE": "sslmode",
|
|
||||||
"PAPERLESS_DBSSLROOTCERT": "sslrootcert",
|
|
||||||
"PAPERLESS_DBSSLCERT": "sslcert",
|
|
||||||
"PAPERLESS_DBSSLKEY": "sslkey",
|
|
||||||
}
|
|
||||||
|
|
||||||
warnings: list[Warning] = []
|
|
||||||
|
|
||||||
for var_name, db_option_key in deprecated_vars.items():
|
|
||||||
if not os.getenv(var_name):
|
|
||||||
continue
|
|
||||||
warnings.append(
|
|
||||||
Warning(
|
|
||||||
f"Deprecated environment variable: {var_name}",
|
|
||||||
hint=(
|
|
||||||
f"{var_name} is no longer supported and will be removed in v3.2. "
|
|
||||||
f"Set the equivalent option via PAPERLESS_DB_OPTIONS instead. "
|
|
||||||
f'Example: PAPERLESS_DB_OPTIONS=\'{{"{db_option_key}": "<value>"}}\'. '
|
|
||||||
"See https://docs.paperless-ngx.com/migration/ for the full reference."
|
|
||||||
),
|
|
||||||
id="paperless.W001",
|
|
||||||
),
|
|
||||||
)
|
|
||||||
|
|
||||||
return warnings
|
|
||||||
|
|||||||
@@ -17,8 +17,6 @@ from dateparser.languages.loader import LocaleDataLoader
|
|||||||
from django.utils.translation import gettext_lazy as _
|
from django.utils.translation import gettext_lazy as _
|
||||||
from dotenv import load_dotenv
|
from dotenv import load_dotenv
|
||||||
|
|
||||||
from paperless.settings.custom import parse_db_settings
|
|
||||||
|
|
||||||
logger = logging.getLogger("paperless.settings")
|
logger = logging.getLogger("paperless.settings")
|
||||||
|
|
||||||
# Tap paperless.conf if it's available
|
# Tap paperless.conf if it's available
|
||||||
@@ -284,7 +282,7 @@ DEBUG = __get_boolean("PAPERLESS_DEBUG", "NO")
|
|||||||
# Directories #
|
# Directories #
|
||||||
###############################################################################
|
###############################################################################
|
||||||
|
|
||||||
BASE_DIR: Path = Path(__file__).resolve().parent.parent.parent
|
BASE_DIR: Path = Path(__file__).resolve().parent.parent
|
||||||
|
|
||||||
STATIC_ROOT = __get_path("PAPERLESS_STATICDIR", BASE_DIR.parent / "static")
|
STATIC_ROOT = __get_path("PAPERLESS_STATICDIR", BASE_DIR.parent / "static")
|
||||||
|
|
||||||
@@ -724,8 +722,83 @@ EMAIL_CERTIFICATE_FILE = __get_optional_path("PAPERLESS_EMAIL_CERTIFICATE_LOCATI
|
|||||||
###############################################################################
|
###############################################################################
|
||||||
# Database #
|
# Database #
|
||||||
###############################################################################
|
###############################################################################
|
||||||
|
def _parse_db_settings() -> dict:
|
||||||
|
databases = {
|
||||||
|
"default": {
|
||||||
|
"ENGINE": "django.db.backends.sqlite3",
|
||||||
|
"NAME": DATA_DIR / "db.sqlite3",
|
||||||
|
"OPTIONS": {},
|
||||||
|
},
|
||||||
|
}
|
||||||
|
if os.getenv("PAPERLESS_DBHOST"):
|
||||||
|
# Have sqlite available as a second option for management commands
|
||||||
|
# This is important when migrating to/from sqlite
|
||||||
|
databases["sqlite"] = databases["default"].copy()
|
||||||
|
|
||||||
DATABASES = parse_db_settings(DATA_DIR)
|
databases["default"] = {
|
||||||
|
"HOST": os.getenv("PAPERLESS_DBHOST"),
|
||||||
|
"NAME": os.getenv("PAPERLESS_DBNAME", "paperless"),
|
||||||
|
"USER": os.getenv("PAPERLESS_DBUSER", "paperless"),
|
||||||
|
"PASSWORD": os.getenv("PAPERLESS_DBPASS", "paperless"),
|
||||||
|
"OPTIONS": {},
|
||||||
|
}
|
||||||
|
if os.getenv("PAPERLESS_DBPORT"):
|
||||||
|
databases["default"]["PORT"] = os.getenv("PAPERLESS_DBPORT")
|
||||||
|
|
||||||
|
# Leave room for future extensibility
|
||||||
|
if os.getenv("PAPERLESS_DBENGINE") == "mariadb":
|
||||||
|
engine = "django.db.backends.mysql"
|
||||||
|
# Contrary to Postgres, Django does not natively support connection pooling for MariaDB.
|
||||||
|
# However, since MariaDB uses threads instead of forks, establishing connections is significantly faster
|
||||||
|
# compared to PostgreSQL, so the lack of pooling is not an issue
|
||||||
|
options = {
|
||||||
|
"read_default_file": "/etc/mysql/my.cnf",
|
||||||
|
"charset": "utf8mb4",
|
||||||
|
"ssl_mode": os.getenv("PAPERLESS_DBSSLMODE", "PREFERRED"),
|
||||||
|
"ssl": {
|
||||||
|
"ca": os.getenv("PAPERLESS_DBSSLROOTCERT", None),
|
||||||
|
"cert": os.getenv("PAPERLESS_DBSSLCERT", None),
|
||||||
|
"key": os.getenv("PAPERLESS_DBSSLKEY", None),
|
||||||
|
},
|
||||||
|
}
|
||||||
|
|
||||||
|
else: # Default to PostgresDB
|
||||||
|
engine = "django.db.backends.postgresql"
|
||||||
|
options = {
|
||||||
|
"sslmode": os.getenv("PAPERLESS_DBSSLMODE", "prefer"),
|
||||||
|
"sslrootcert": os.getenv("PAPERLESS_DBSSLROOTCERT", None),
|
||||||
|
"sslcert": os.getenv("PAPERLESS_DBSSLCERT", None),
|
||||||
|
"sslkey": os.getenv("PAPERLESS_DBSSLKEY", None),
|
||||||
|
}
|
||||||
|
if int(os.getenv("PAPERLESS_DB_POOLSIZE", 0)) > 0:
|
||||||
|
options.update(
|
||||||
|
{
|
||||||
|
"pool": {
|
||||||
|
"min_size": 1,
|
||||||
|
"max_size": int(os.getenv("PAPERLESS_DB_POOLSIZE")),
|
||||||
|
},
|
||||||
|
},
|
||||||
|
)
|
||||||
|
|
||||||
|
databases["default"]["ENGINE"] = engine
|
||||||
|
databases["default"]["OPTIONS"].update(options)
|
||||||
|
|
||||||
|
if os.getenv("PAPERLESS_DB_TIMEOUT") is not None:
|
||||||
|
if databases["default"]["ENGINE"] == "django.db.backends.sqlite3":
|
||||||
|
databases["default"]["OPTIONS"].update(
|
||||||
|
{"timeout": int(os.getenv("PAPERLESS_DB_TIMEOUT"))},
|
||||||
|
)
|
||||||
|
else:
|
||||||
|
databases["default"]["OPTIONS"].update(
|
||||||
|
{"connect_timeout": int(os.getenv("PAPERLESS_DB_TIMEOUT"))},
|
||||||
|
)
|
||||||
|
databases["sqlite"]["OPTIONS"].update(
|
||||||
|
{"timeout": int(os.getenv("PAPERLESS_DB_TIMEOUT"))},
|
||||||
|
)
|
||||||
|
return databases
|
||||||
|
|
||||||
|
|
||||||
|
DATABASES = _parse_db_settings()
|
||||||
|
|
||||||
if os.getenv("PAPERLESS_DBENGINE") == "mariadb":
|
if os.getenv("PAPERLESS_DBENGINE") == "mariadb":
|
||||||
# Silence Django error on old MariaDB versions.
|
# Silence Django error on old MariaDB versions.
|
||||||
@@ -1,122 +0,0 @@
|
|||||||
import os
|
|
||||||
from pathlib import Path
|
|
||||||
from typing import Any
|
|
||||||
|
|
||||||
from paperless.settings.parsers import get_choice_from_env
|
|
||||||
from paperless.settings.parsers import get_int_from_env
|
|
||||||
from paperless.settings.parsers import parse_dict_from_str
|
|
||||||
|
|
||||||
|
|
||||||
def parse_db_settings(data_dir: Path) -> dict[str, dict[str, Any]]:
|
|
||||||
"""Parse database settings from environment variables.
|
|
||||||
|
|
||||||
Core connection variables (no deprecation):
|
|
||||||
- PAPERLESS_DBENGINE (sqlite/postgresql/mariadb)
|
|
||||||
- PAPERLESS_DBHOST, PAPERLESS_DBPORT
|
|
||||||
- PAPERLESS_DBNAME, PAPERLESS_DBUSER, PAPERLESS_DBPASS
|
|
||||||
|
|
||||||
Advanced options can be set via:
|
|
||||||
- Legacy individual env vars (deprecated in v3.0, removed in v3.2)
|
|
||||||
- PAPERLESS_DB_OPTIONS (recommended v3+ approach)
|
|
||||||
|
|
||||||
Args:
|
|
||||||
data_dir: The data directory path for SQLite database location.
|
|
||||||
|
|
||||||
Returns:
|
|
||||||
A databases dict suitable for Django DATABASES setting.
|
|
||||||
"""
|
|
||||||
try:
|
|
||||||
engine = get_choice_from_env(
|
|
||||||
"PAPERLESS_DBENGINE",
|
|
||||||
{"sqlite", "postgresql", "mariadb"},
|
|
||||||
default="sqlite",
|
|
||||||
)
|
|
||||||
except ValueError:
|
|
||||||
# MariaDB users already had to set PAPERLESS_DBENGINE, so it was picked up above
|
|
||||||
# SQLite users didn't need to set anything
|
|
||||||
engine = "postgresql" if "PAPERLESS_DBHOST" in os.environ else "sqlite"
|
|
||||||
|
|
||||||
db_config: dict[str, Any]
|
|
||||||
base_options: dict[str, Any]
|
|
||||||
|
|
||||||
match engine:
|
|
||||||
case "sqlite":
|
|
||||||
db_config = {
|
|
||||||
"ENGINE": "django.db.backends.sqlite3",
|
|
||||||
"NAME": str((data_dir / "db.sqlite3").resolve()),
|
|
||||||
}
|
|
||||||
base_options = {}
|
|
||||||
|
|
||||||
case "postgresql":
|
|
||||||
db_config = {
|
|
||||||
"ENGINE": "django.db.backends.postgresql",
|
|
||||||
"HOST": os.getenv("PAPERLESS_DBHOST"),
|
|
||||||
"NAME": os.getenv("PAPERLESS_DBNAME", "paperless"),
|
|
||||||
"USER": os.getenv("PAPERLESS_DBUSER", "paperless"),
|
|
||||||
"PASSWORD": os.getenv("PAPERLESS_DBPASS", "paperless"),
|
|
||||||
}
|
|
||||||
|
|
||||||
base_options = {
|
|
||||||
"sslmode": os.getenv("PAPERLESS_DBSSLMODE", "prefer"),
|
|
||||||
"sslrootcert": os.getenv("PAPERLESS_DBSSLROOTCERT"),
|
|
||||||
"sslcert": os.getenv("PAPERLESS_DBSSLCERT"),
|
|
||||||
"sslkey": os.getenv("PAPERLESS_DBSSLKEY"),
|
|
||||||
}
|
|
||||||
|
|
||||||
if (pool_size := get_int_from_env("PAPERLESS_DB_POOLSIZE")) is not None:
|
|
||||||
base_options["pool"] = {
|
|
||||||
"min_size": 1,
|
|
||||||
"max_size": pool_size,
|
|
||||||
}
|
|
||||||
|
|
||||||
case "mariadb":
|
|
||||||
db_config = {
|
|
||||||
"ENGINE": "django.db.backends.mysql",
|
|
||||||
"HOST": os.getenv("PAPERLESS_DBHOST"),
|
|
||||||
"NAME": os.getenv("PAPERLESS_DBNAME", "paperless"),
|
|
||||||
"USER": os.getenv("PAPERLESS_DBUSER", "paperless"),
|
|
||||||
"PASSWORD": os.getenv("PAPERLESS_DBPASS", "paperless"),
|
|
||||||
}
|
|
||||||
|
|
||||||
base_options = {
|
|
||||||
"read_default_file": "/etc/mysql/my.cnf",
|
|
||||||
"charset": "utf8mb4",
|
|
||||||
"collation": "utf8mb4_unicode_ci",
|
|
||||||
"ssl_mode": os.getenv("PAPERLESS_DBSSLMODE", "PREFERRED"),
|
|
||||||
"ssl": {
|
|
||||||
"ca": os.getenv("PAPERLESS_DBSSLROOTCERT"),
|
|
||||||
"cert": os.getenv("PAPERLESS_DBSSLCERT"),
|
|
||||||
"key": os.getenv("PAPERLESS_DBSSLKEY"),
|
|
||||||
},
|
|
||||||
}
|
|
||||||
case _: # pragma: no cover
|
|
||||||
raise NotImplementedError(engine)
|
|
||||||
|
|
||||||
# Handle port setting for external databases
|
|
||||||
if (
|
|
||||||
engine in ("postgresql", "mariadb")
|
|
||||||
and (port := get_int_from_env("PAPERLESS_DBPORT")) is not None
|
|
||||||
):
|
|
||||||
db_config["PORT"] = port
|
|
||||||
|
|
||||||
# Handle timeout setting (common across all engines, different key names)
|
|
||||||
if (timeout := get_int_from_env("PAPERLESS_DB_TIMEOUT")) is not None:
|
|
||||||
timeout_key = "timeout" if engine == "sqlite" else "connect_timeout"
|
|
||||||
base_options[timeout_key] = timeout
|
|
||||||
|
|
||||||
# Apply PAPERLESS_DB_OPTIONS overrides
|
|
||||||
db_config["OPTIONS"] = parse_dict_from_str(
|
|
||||||
os.getenv("PAPERLESS_DB_OPTIONS"),
|
|
||||||
defaults=base_options,
|
|
||||||
separator=";",
|
|
||||||
type_map={
|
|
||||||
# SQLite options
|
|
||||||
"timeout": int,
|
|
||||||
# Postgres/MariaDB options
|
|
||||||
"connect_timeout": int,
|
|
||||||
"pool.min_size": int,
|
|
||||||
"pool.max_size": int,
|
|
||||||
},
|
|
||||||
)
|
|
||||||
|
|
||||||
return {"default": db_config}
|
|
||||||
@@ -1,192 +0,0 @@
|
|||||||
import copy
|
|
||||||
import os
|
|
||||||
from collections.abc import Callable
|
|
||||||
from collections.abc import Mapping
|
|
||||||
from pathlib import Path
|
|
||||||
from typing import Any
|
|
||||||
from typing import TypeVar
|
|
||||||
from typing import overload
|
|
||||||
|
|
||||||
T = TypeVar("T")
|
|
||||||
|
|
||||||
|
|
||||||
def str_to_bool(value: str) -> bool:
    """
    Convert a string representation of truth to a boolean.

    Accepted truthy values: 'true', '1', 't', 'y', 'yes'.
    Accepted falsy values: 'false', '0', 'f', 'n', 'no'.
    Matching is case-insensitive and surrounding whitespace is ignored.

    Args:
        value: The string to interpret.

    Returns:
        True or False depending on the recognized token.

    Raises:
        ValueError: If the string matches neither a truthy nor a falsy token.
    """
    normalized = value.strip().lower()
    truthy_tokens = ("true", "1", "t", "y", "yes")
    falsy_tokens = ("false", "0", "f", "n", "no")
    if normalized in truthy_tokens:
        return True
    if normalized in falsy_tokens:
        return False
    # Anything else is ambiguous; refuse rather than guess.
    raise ValueError(f"Cannot convert '{value}' to a boolean.")
|
|
||||||
|
|
||||||
|
|
||||||
@overload
|
|
||||||
def get_int_from_env(key: str) -> int | None: ...
|
|
||||||
|
|
||||||
|
|
||||||
@overload
|
|
||||||
def get_int_from_env(key: str, default: None) -> int | None: ...
|
|
||||||
|
|
||||||
|
|
||||||
@overload
|
|
||||||
def get_int_from_env(key: str, default: int) -> int: ...
|
|
||||||
|
|
||||||
|
|
||||||
def get_int_from_env(key: str, default: int | None = None) -> int | None:
|
|
||||||
"""
|
|
||||||
Return an integer value based on the environment variable.
|
|
||||||
If default is provided, returns that value when key is missing.
|
|
||||||
If default is None, returns None when key is missing.
|
|
||||||
"""
|
|
||||||
if key not in os.environ:
|
|
||||||
return default
|
|
||||||
|
|
||||||
return int(os.environ[key])
|
|
||||||
|
|
||||||
|
|
||||||
def parse_dict_from_str(
|
|
||||||
env_str: str | None,
|
|
||||||
defaults: dict[str, Any] | None = None,
|
|
||||||
type_map: Mapping[str, Callable[[str], Any]] | None = None,
|
|
||||||
separator: str = ",",
|
|
||||||
) -> dict[str, Any]:
|
|
||||||
"""
|
|
||||||
Parses a key-value string into a dictionary, applying defaults and casting types.
|
|
||||||
|
|
||||||
Supports nested keys via dot-notation, e.g.:
|
|
||||||
"database.host=localhost,database.port=5432"
|
|
||||||
|
|
||||||
Args:
|
|
||||||
env_str: The string from the environment variable (e.g., "port=9090,debug=true").
|
|
||||||
defaults: A dictionary of default values (can contain nested dicts).
|
|
||||||
type_map: A dictionary mapping keys (dot-notation allowed) to a type or a parsing
|
|
||||||
function (e.g., {'port': int, 'debug': bool, 'database.port': int}).
|
|
||||||
The special `bool` type triggers custom boolean parsing.
|
|
||||||
separator: The character used to separate key-value pairs. Defaults to ','.
|
|
||||||
|
|
||||||
Returns:
|
|
||||||
A dictionary with the parsed and correctly-typed settings.
|
|
||||||
|
|
||||||
Raises:
|
|
||||||
ValueError: If a value cannot be cast to its specified type.
|
|
||||||
"""
|
|
||||||
|
|
||||||
def _set_nested(d: dict, keys: list[str], value: Any) -> None:
|
|
||||||
"""Set a nested value, creating intermediate dicts as needed."""
|
|
||||||
cur = d
|
|
||||||
for k in keys[:-1]:
|
|
||||||
if k not in cur or not isinstance(cur[k], dict):
|
|
||||||
cur[k] = {}
|
|
||||||
cur = cur[k]
|
|
||||||
cur[keys[-1]] = value
|
|
||||||
|
|
||||||
def _get_nested(d: dict, keys: list[str]) -> Any:
|
|
||||||
"""Get nested value or raise KeyError if not present."""
|
|
||||||
cur = d
|
|
||||||
for k in keys:
|
|
||||||
if not isinstance(cur, dict) or k not in cur:
|
|
||||||
raise KeyError
|
|
||||||
cur = cur[k]
|
|
||||||
return cur
|
|
||||||
|
|
||||||
def _has_nested(d: dict, keys: list[str]) -> bool:
|
|
||||||
try:
|
|
||||||
_get_nested(d, keys)
|
|
||||||
return True
|
|
||||||
except KeyError:
|
|
||||||
return False
|
|
||||||
|
|
||||||
settings: dict[str, Any] = copy.deepcopy(defaults) if defaults else {}
|
|
||||||
_type_map = type_map if type_map else {}
|
|
||||||
|
|
||||||
if not env_str:
|
|
||||||
return settings
|
|
||||||
|
|
||||||
# Parse the environment string using the specified separator
|
|
||||||
pairs = [p.strip() for p in env_str.split(separator) if p.strip()]
|
|
||||||
for pair in pairs:
|
|
||||||
if "=" not in pair:
|
|
||||||
# ignore malformed pairs
|
|
||||||
continue
|
|
||||||
key, val = pair.split("=", 1)
|
|
||||||
key = key.strip()
|
|
||||||
val = val.strip()
|
|
||||||
if not key:
|
|
||||||
continue
|
|
||||||
parts = key.split(".")
|
|
||||||
_set_nested(settings, parts, val)
|
|
||||||
|
|
||||||
# Apply type casting to the updated settings (supports nested keys in type_map)
|
|
||||||
for key, caster in _type_map.items():
|
|
||||||
key_parts = key.split(".")
|
|
||||||
if _has_nested(settings, key_parts):
|
|
||||||
raw_val = _get_nested(settings, key_parts)
|
|
||||||
# Only cast if it's a string (i.e. from env parsing). If defaults already provided
|
|
||||||
# a different type we leave it as-is.
|
|
||||||
if isinstance(raw_val, str):
|
|
||||||
try:
|
|
||||||
if caster is bool:
|
|
||||||
parsed = str_to_bool(raw_val)
|
|
||||||
elif caster is Path:
|
|
||||||
parsed = Path(raw_val).resolve()
|
|
||||||
else:
|
|
||||||
parsed = caster(raw_val)
|
|
||||||
except (ValueError, TypeError) as e:
|
|
||||||
caster_name = getattr(caster, "__name__", repr(caster))
|
|
||||||
raise ValueError(
|
|
||||||
f"Error casting key '{key}' with value '{raw_val}' "
|
|
||||||
f"to type '{caster_name}'",
|
|
||||||
) from e
|
|
||||||
_set_nested(settings, key_parts, parsed)
|
|
||||||
|
|
||||||
return settings
|
|
||||||
|
|
||||||
|
|
||||||
def get_choice_from_env(
|
|
||||||
env_key: str,
|
|
||||||
choices: set[str],
|
|
||||||
default: str | None = None,
|
|
||||||
) -> str:
|
|
||||||
"""
|
|
||||||
Gets and validates an environment variable against a set of allowed choices.
|
|
||||||
|
|
||||||
Args:
|
|
||||||
env_key: The environment variable key to validate
|
|
||||||
choices: Set of valid choices for the environment variable
|
|
||||||
default: Optional default value if environment variable is not set
|
|
||||||
|
|
||||||
Returns:
|
|
||||||
The validated environment variable value
|
|
||||||
|
|
||||||
Raises:
|
|
||||||
ValueError: If the environment variable value is not in choices
|
|
||||||
or if no default is provided and env var is missing
|
|
||||||
"""
|
|
||||||
value = os.environ.get(env_key, default)
|
|
||||||
|
|
||||||
if value is None:
|
|
||||||
raise ValueError(
|
|
||||||
f"Environment variable '{env_key}' is required but not set.",
|
|
||||||
)
|
|
||||||
|
|
||||||
if value not in choices:
|
|
||||||
raise ValueError(
|
|
||||||
f"Environment variable '{env_key}' has invalid value '{value}'. "
|
|
||||||
f"Valid choices are: {', '.join(sorted(choices))}",
|
|
||||||
)
|
|
||||||
|
|
||||||
return value
|
|
||||||
@@ -1,266 +0,0 @@
|
|||||||
import os
|
|
||||||
from pathlib import Path
|
|
||||||
|
|
||||||
import pytest
|
|
||||||
from pytest_mock import MockerFixture
|
|
||||||
|
|
||||||
from paperless.settings.custom import parse_db_settings
|
|
||||||
|
|
||||||
|
|
||||||
class TestParseDbSettings:
|
|
||||||
"""Test suite for parse_db_settings function."""
|
|
||||||
|
|
||||||
@pytest.mark.parametrize(
|
|
||||||
("env_vars", "expected_database_settings"),
|
|
||||||
[
|
|
||||||
pytest.param(
|
|
||||||
{},
|
|
||||||
{
|
|
||||||
"default": {
|
|
||||||
"ENGINE": "django.db.backends.sqlite3",
|
|
||||||
"NAME": None, # Will be replaced with tmp_path
|
|
||||||
"OPTIONS": {},
|
|
||||||
},
|
|
||||||
},
|
|
||||||
id="default-sqlite",
|
|
||||||
),
|
|
||||||
pytest.param(
|
|
||||||
{
|
|
||||||
"PAPERLESS_DBENGINE": "sqlite",
|
|
||||||
"PAPERLESS_DB_OPTIONS": "timeout=30",
|
|
||||||
},
|
|
||||||
{
|
|
||||||
"default": {
|
|
||||||
"ENGINE": "django.db.backends.sqlite3",
|
|
||||||
"NAME": None, # Will be replaced with tmp_path
|
|
||||||
"OPTIONS": {
|
|
||||||
"timeout": 30,
|
|
||||||
},
|
|
||||||
},
|
|
||||||
},
|
|
||||||
id="sqlite-with-timeout-override",
|
|
||||||
),
|
|
||||||
pytest.param(
|
|
||||||
{
|
|
||||||
"PAPERLESS_DBENGINE": "postgresql",
|
|
||||||
"PAPERLESS_DBHOST": "localhost",
|
|
||||||
},
|
|
||||||
{
|
|
||||||
"default": {
|
|
||||||
"ENGINE": "django.db.backends.postgresql",
|
|
||||||
"HOST": "localhost",
|
|
||||||
"NAME": "paperless",
|
|
||||||
"USER": "paperless",
|
|
||||||
"PASSWORD": "paperless",
|
|
||||||
"OPTIONS": {
|
|
||||||
"sslmode": "prefer",
|
|
||||||
"sslrootcert": None,
|
|
||||||
"sslcert": None,
|
|
||||||
"sslkey": None,
|
|
||||||
},
|
|
||||||
},
|
|
||||||
},
|
|
||||||
id="postgresql-defaults",
|
|
||||||
),
|
|
||||||
pytest.param(
|
|
||||||
{
|
|
||||||
"PAPERLESS_DBENGINE": "postgresql",
|
|
||||||
"PAPERLESS_DBHOST": "paperless-db-host",
|
|
||||||
"PAPERLESS_DBPORT": "1111",
|
|
||||||
"PAPERLESS_DBNAME": "customdb",
|
|
||||||
"PAPERLESS_DBUSER": "customuser",
|
|
||||||
"PAPERLESS_DBPASS": "custompass",
|
|
||||||
"PAPERLESS_DB_OPTIONS": "pool.max_size=50;pool.min_size=2;sslmode=require",
|
|
||||||
},
|
|
||||||
{
|
|
||||||
"default": {
|
|
||||||
"ENGINE": "django.db.backends.postgresql",
|
|
||||||
"HOST": "paperless-db-host",
|
|
||||||
"PORT": 1111,
|
|
||||||
"NAME": "customdb",
|
|
||||||
"USER": "customuser",
|
|
||||||
"PASSWORD": "custompass",
|
|
||||||
"OPTIONS": {
|
|
||||||
"sslmode": "require",
|
|
||||||
"sslrootcert": None,
|
|
||||||
"sslcert": None,
|
|
||||||
"sslkey": None,
|
|
||||||
"pool": {
|
|
||||||
"min_size": 2,
|
|
||||||
"max_size": 50,
|
|
||||||
},
|
|
||||||
},
|
|
||||||
},
|
|
||||||
},
|
|
||||||
id="postgresql-overrides",
|
|
||||||
),
|
|
||||||
pytest.param(
|
|
||||||
{
|
|
||||||
"PAPERLESS_DBENGINE": "postgresql",
|
|
||||||
"PAPERLESS_DBHOST": "pghost",
|
|
||||||
"PAPERLESS_DB_POOLSIZE": "10",
|
|
||||||
},
|
|
||||||
{
|
|
||||||
"default": {
|
|
||||||
"ENGINE": "django.db.backends.postgresql",
|
|
||||||
"HOST": "pghost",
|
|
||||||
"NAME": "paperless",
|
|
||||||
"USER": "paperless",
|
|
||||||
"PASSWORD": "paperless",
|
|
||||||
"OPTIONS": {
|
|
||||||
"sslmode": "prefer",
|
|
||||||
"sslrootcert": None,
|
|
||||||
"sslcert": None,
|
|
||||||
"sslkey": None,
|
|
||||||
"pool": {
|
|
||||||
"min_size": 1,
|
|
||||||
"max_size": 10,
|
|
||||||
},
|
|
||||||
},
|
|
||||||
},
|
|
||||||
},
|
|
||||||
id="postgresql-legacy-poolsize",
|
|
||||||
),
|
|
||||||
pytest.param(
|
|
||||||
{
|
|
||||||
"PAPERLESS_DBENGINE": "postgresql",
|
|
||||||
"PAPERLESS_DBHOST": "pghost",
|
|
||||||
"PAPERLESS_DBSSLMODE": "require",
|
|
||||||
"PAPERLESS_DBSSLROOTCERT": "/certs/ca.crt",
|
|
||||||
"PAPERLESS_DB_TIMEOUT": "30",
|
|
||||||
},
|
|
||||||
{
|
|
||||||
"default": {
|
|
||||||
"ENGINE": "django.db.backends.postgresql",
|
|
||||||
"HOST": "pghost",
|
|
||||||
"NAME": "paperless",
|
|
||||||
"USER": "paperless",
|
|
||||||
"PASSWORD": "paperless",
|
|
||||||
"OPTIONS": {
|
|
||||||
"sslmode": "require",
|
|
||||||
"sslrootcert": "/certs/ca.crt",
|
|
||||||
"sslcert": None,
|
|
||||||
"sslkey": None,
|
|
||||||
"connect_timeout": 30,
|
|
||||||
},
|
|
||||||
},
|
|
||||||
},
|
|
||||||
id="postgresql-legacy-ssl-and-timeout",
|
|
||||||
),
|
|
||||||
pytest.param(
|
|
||||||
{
|
|
||||||
"PAPERLESS_DBENGINE": "mariadb",
|
|
||||||
"PAPERLESS_DBHOST": "localhost",
|
|
||||||
},
|
|
||||||
{
|
|
||||||
"default": {
|
|
||||||
"ENGINE": "django.db.backends.mysql",
|
|
||||||
"HOST": "localhost",
|
|
||||||
"NAME": "paperless",
|
|
||||||
"USER": "paperless",
|
|
||||||
"PASSWORD": "paperless",
|
|
||||||
"OPTIONS": {
|
|
||||||
"read_default_file": "/etc/mysql/my.cnf",
|
|
||||||
"charset": "utf8mb4",
|
|
||||||
"collation": "utf8mb4_unicode_ci",
|
|
||||||
"ssl_mode": "PREFERRED",
|
|
||||||
"ssl": {
|
|
||||||
"ca": None,
|
|
||||||
"cert": None,
|
|
||||||
"key": None,
|
|
||||||
},
|
|
||||||
},
|
|
||||||
},
|
|
||||||
},
|
|
||||||
id="mariadb-defaults",
|
|
||||||
),
|
|
||||||
pytest.param(
|
|
||||||
{
|
|
||||||
"PAPERLESS_DBENGINE": "mariadb",
|
|
||||||
"PAPERLESS_DBHOST": "paperless-mariadb-host",
|
|
||||||
"PAPERLESS_DBPORT": "5555",
|
|
||||||
"PAPERLESS_DBUSER": "my-cool-user",
|
|
||||||
"PAPERLESS_DBPASS": "my-secure-password",
|
|
||||||
"PAPERLESS_DB_OPTIONS": "ssl.ca=/path/to/ca.pem;ssl_mode=REQUIRED",
|
|
||||||
},
|
|
||||||
{
|
|
||||||
"default": {
|
|
||||||
"ENGINE": "django.db.backends.mysql",
|
|
||||||
"HOST": "paperless-mariadb-host",
|
|
||||||
"PORT": 5555,
|
|
||||||
"NAME": "paperless",
|
|
||||||
"USER": "my-cool-user",
|
|
||||||
"PASSWORD": "my-secure-password",
|
|
||||||
"OPTIONS": {
|
|
||||||
"read_default_file": "/etc/mysql/my.cnf",
|
|
||||||
"charset": "utf8mb4",
|
|
||||||
"collation": "utf8mb4_unicode_ci",
|
|
||||||
"ssl_mode": "REQUIRED",
|
|
||||||
"ssl": {
|
|
||||||
"ca": "/path/to/ca.pem",
|
|
||||||
"cert": None,
|
|
||||||
"key": None,
|
|
||||||
},
|
|
||||||
},
|
|
||||||
},
|
|
||||||
},
|
|
||||||
id="mariadb-overrides",
|
|
||||||
),
|
|
||||||
pytest.param(
|
|
||||||
{
|
|
||||||
"PAPERLESS_DBENGINE": "mariadb",
|
|
||||||
"PAPERLESS_DBHOST": "mariahost",
|
|
||||||
"PAPERLESS_DBSSLMODE": "REQUIRED",
|
|
||||||
"PAPERLESS_DBSSLROOTCERT": "/certs/ca.pem",
|
|
||||||
"PAPERLESS_DBSSLCERT": "/certs/client.pem",
|
|
||||||
"PAPERLESS_DBSSLKEY": "/certs/client.key",
|
|
||||||
"PAPERLESS_DB_TIMEOUT": "25",
|
|
||||||
},
|
|
||||||
{
|
|
||||||
"default": {
|
|
||||||
"ENGINE": "django.db.backends.mysql",
|
|
||||||
"HOST": "mariahost",
|
|
||||||
"NAME": "paperless",
|
|
||||||
"USER": "paperless",
|
|
||||||
"PASSWORD": "paperless",
|
|
||||||
"OPTIONS": {
|
|
||||||
"read_default_file": "/etc/mysql/my.cnf",
|
|
||||||
"charset": "utf8mb4",
|
|
||||||
"collation": "utf8mb4_unicode_ci",
|
|
||||||
"ssl_mode": "REQUIRED",
|
|
||||||
"ssl": {
|
|
||||||
"ca": "/certs/ca.pem",
|
|
||||||
"cert": "/certs/client.pem",
|
|
||||||
"key": "/certs/client.key",
|
|
||||||
},
|
|
||||||
"connect_timeout": 25,
|
|
||||||
},
|
|
||||||
},
|
|
||||||
},
|
|
||||||
id="mariadb-legacy-ssl-and-timeout",
|
|
||||||
),
|
|
||||||
],
|
|
||||||
)
|
|
||||||
def test_parse_db_settings(
|
|
||||||
self,
|
|
||||||
tmp_path: Path,
|
|
||||||
mocker: MockerFixture,
|
|
||||||
env_vars: dict[str, str],
|
|
||||||
expected_database_settings: dict[str, dict],
|
|
||||||
) -> None:
|
|
||||||
"""Test various database configurations with defaults and overrides."""
|
|
||||||
# Clear environment and set test vars
|
|
||||||
mocker.patch.dict(os.environ, env_vars, clear=True)
|
|
||||||
|
|
||||||
# Update expected paths with actual tmp_path
|
|
||||||
if (
|
|
||||||
"default" in expected_database_settings
|
|
||||||
and expected_database_settings["default"]["NAME"] is None
|
|
||||||
):
|
|
||||||
expected_database_settings["default"]["NAME"] = str(
|
|
||||||
tmp_path / "db.sqlite3",
|
|
||||||
)
|
|
||||||
|
|
||||||
settings = parse_db_settings(tmp_path)
|
|
||||||
|
|
||||||
assert settings == expected_database_settings
|
|
||||||
@@ -1,414 +0,0 @@
|
|||||||
import os
|
|
||||||
from pathlib import Path
|
|
||||||
|
|
||||||
import pytest
|
|
||||||
from pytest_mock import MockerFixture
|
|
||||||
|
|
||||||
from paperless.settings.parsers import get_choice_from_env
|
|
||||||
from paperless.settings.parsers import get_int_from_env
|
|
||||||
from paperless.settings.parsers import parse_dict_from_str
|
|
||||||
from paperless.settings.parsers import str_to_bool
|
|
||||||
|
|
||||||
|
|
||||||
class TestStringToBool:
|
|
||||||
@pytest.mark.parametrize(
|
|
||||||
"true_value",
|
|
||||||
[
|
|
||||||
pytest.param("true", id="lowercase_true"),
|
|
||||||
pytest.param("1", id="digit_1"),
|
|
||||||
pytest.param("T", id="capital_T"),
|
|
||||||
pytest.param("y", id="lowercase_y"),
|
|
||||||
pytest.param("YES", id="uppercase_YES"),
|
|
||||||
pytest.param(" True ", id="whitespace_true"),
|
|
||||||
],
|
|
||||||
)
|
|
||||||
def test_true_conversion(self, true_value: str):
|
|
||||||
"""Test that various 'true' strings correctly evaluate to True."""
|
|
||||||
assert str_to_bool(true_value) is True
|
|
||||||
|
|
||||||
@pytest.mark.parametrize(
|
|
||||||
"false_value",
|
|
||||||
[
|
|
||||||
pytest.param("false", id="lowercase_false"),
|
|
||||||
pytest.param("0", id="digit_0"),
|
|
||||||
pytest.param("f", id="capital_f"),
|
|
||||||
pytest.param("N", id="capital_N"),
|
|
||||||
pytest.param("no", id="lowercase_no"),
|
|
||||||
pytest.param(" False ", id="whitespace_false"),
|
|
||||||
],
|
|
||||||
)
|
|
||||||
def test_false_conversion(self, false_value: str):
|
|
||||||
"""Test that various 'false' strings correctly evaluate to False."""
|
|
||||||
assert str_to_bool(false_value) is False
|
|
||||||
|
|
||||||
def test_invalid_conversion(self):
|
|
||||||
"""Test that an invalid string raises a ValueError."""
|
|
||||||
with pytest.raises(ValueError, match="Cannot convert 'maybe' to a boolean\\."):
|
|
||||||
str_to_bool("maybe")
|
|
||||||
|
|
||||||
|
|
||||||
class TestParseDictFromString:
|
|
||||||
def test_empty_and_none_input(self):
|
|
||||||
"""Test behavior with None or empty string input."""
|
|
||||||
assert parse_dict_from_str(None) == {}
|
|
||||||
assert parse_dict_from_str("") == {}
|
|
||||||
defaults = {"a": 1}
|
|
||||||
res = parse_dict_from_str(None, defaults=defaults)
|
|
||||||
assert res == defaults
|
|
||||||
# Ensure it returns a copy, not the original object
|
|
||||||
assert res is not defaults
|
|
||||||
|
|
||||||
def test_basic_parsing(self):
|
|
||||||
"""Test simple key-value parsing without defaults or types."""
|
|
||||||
env_str = "key1=val1, key2=val2"
|
|
||||||
expected = {"key1": "val1", "key2": "val2"}
|
|
||||||
assert parse_dict_from_str(env_str) == expected
|
|
||||||
|
|
||||||
def test_with_defaults(self):
|
|
||||||
"""Test that environment values override defaults correctly."""
|
|
||||||
defaults = {"host": "localhost", "port": 8000, "user": "default"}
|
|
||||||
env_str = "port=9090, host=db.example.com"
|
|
||||||
expected = {"host": "db.example.com", "port": "9090", "user": "default"}
|
|
||||||
result = parse_dict_from_str(env_str, defaults=defaults)
|
|
||||||
assert result == expected
|
|
||||||
|
|
||||||
def test_type_casting(self):
|
|
||||||
"""Test successful casting of values to specified types."""
|
|
||||||
env_str = "port=9090, debug=true, timeout=12.5, user=admin"
|
|
||||||
type_map = {"port": int, "debug": bool, "timeout": float}
|
|
||||||
expected = {"port": 9090, "debug": True, "timeout": 12.5, "user": "admin"}
|
|
||||||
result = parse_dict_from_str(env_str, type_map=type_map)
|
|
||||||
assert result == expected
|
|
||||||
|
|
||||||
def test_type_casting_with_defaults(self):
|
|
||||||
"""Test casting when values come from both defaults and env string."""
|
|
||||||
defaults = {"port": 8000, "debug": False, "retries": 3}
|
|
||||||
env_str = "port=9090, debug=true"
|
|
||||||
type_map = {"port": int, "debug": bool, "retries": int}
|
|
||||||
|
|
||||||
# The 'retries' value comes from defaults and is already an int,
|
|
||||||
# so it should not be processed by the caster.
|
|
||||||
expected = {"port": 9090, "debug": True, "retries": 3}
|
|
||||||
result = parse_dict_from_str(env_str, defaults=defaults, type_map=type_map)
|
|
||||||
assert result == expected
|
|
||||||
assert isinstance(result["retries"], int)
|
|
||||||
|
|
||||||
def test_path_casting(self, tmp_path: Path):
|
|
||||||
"""Test successful casting of a string to a resolved pathlib.Path object."""
|
|
||||||
# Create a dummy file to resolve against
|
|
||||||
test_file = tmp_path / "test_file.txt"
|
|
||||||
test_file.touch()
|
|
||||||
|
|
||||||
env_str = f"config_path={test_file}"
|
|
||||||
type_map = {"config_path": Path}
|
|
||||||
result = parse_dict_from_str(env_str, type_map=type_map)
|
|
||||||
|
|
||||||
# The result should be a resolved Path object
|
|
||||||
assert isinstance(result["config_path"], Path)
|
|
||||||
assert result["config_path"] == test_file.resolve()
|
|
||||||
|
|
||||||
def test_custom_separator(self):
|
|
||||||
"""Test parsing with a custom separator like a semicolon."""
|
|
||||||
env_str = "host=db; port=5432; user=test"
|
|
||||||
expected = {"host": "db", "port": "5432", "user": "test"}
|
|
||||||
result = parse_dict_from_str(env_str, separator=";")
|
|
||||||
assert result == expected
|
|
||||||
|
|
||||||
def test_edge_cases_in_string(self):
|
|
||||||
"""Test malformed strings to ensure robustness."""
|
|
||||||
# Malformed pair 'debug' is skipped, extra comma is ignored
|
|
||||||
env_str = "key=val,, debug, foo=bar"
|
|
||||||
expected = {"key": "val", "foo": "bar"}
|
|
||||||
assert parse_dict_from_str(env_str) == expected
|
|
||||||
|
|
||||||
# Value can contain the equals sign
|
|
||||||
env_str = "url=postgres://user:pass@host:5432/db"
|
|
||||||
expected = {"url": "postgres://user:pass@host:5432/db"}
|
|
||||||
assert parse_dict_from_str(env_str) == expected
|
|
||||||
|
|
||||||
def test_casting_error_handling(self):
|
|
||||||
"""Test that a ValueError is raised for invalid casting."""
|
|
||||||
env_str = "port=not-a-number"
|
|
||||||
type_map = {"port": int}
|
|
||||||
|
|
||||||
with pytest.raises(ValueError) as excinfo:
|
|
||||||
parse_dict_from_str(env_str, type_map=type_map)
|
|
||||||
|
|
||||||
assert "Error casting key 'port'" in str(excinfo.value)
|
|
||||||
assert "value 'not-a-number'" in str(excinfo.value)
|
|
||||||
assert "to type 'int'" in str(excinfo.value)
|
|
||||||
|
|
||||||
def test_bool_casting_error(self):
|
|
||||||
"""Test that an invalid boolean string raises a ValueError."""
|
|
||||||
env_str = "debug=maybe"
|
|
||||||
type_map = {"debug": bool}
|
|
||||||
with pytest.raises(ValueError, match="Error casting key 'debug'"):
|
|
||||||
parse_dict_from_str(env_str, type_map=type_map)
|
|
||||||
|
|
||||||
def test_nested_key_parsing_basic(self):
|
|
||||||
"""Basic nested key parsing using dot-notation."""
|
|
||||||
env_str = "database.host=db.example.com, database.port=5432, logging.level=INFO"
|
|
||||||
result = parse_dict_from_str(env_str)
|
|
||||||
assert result == {
|
|
||||||
"database": {"host": "db.example.com", "port": "5432"},
|
|
||||||
"logging": {"level": "INFO"},
|
|
||||||
}
|
|
||||||
|
|
||||||
def test_nested_overrides_defaults_and_deepcopy(self):
|
|
||||||
"""Nested env keys override defaults and defaults are deep-copied."""
|
|
||||||
defaults = {"database": {"host": "127.0.0.1", "port": 3306, "user": "default"}}
|
|
||||||
env_str = "database.host=db.example.com, debug=true"
|
|
||||||
result = parse_dict_from_str(
|
|
||||||
env_str,
|
|
||||||
defaults=defaults,
|
|
||||||
type_map={"debug": bool},
|
|
||||||
)
|
|
||||||
|
|
||||||
assert result["database"]["host"] == "db.example.com"
|
|
||||||
# Unchanged default preserved
|
|
||||||
assert result["database"]["port"] == 3306
|
|
||||||
assert result["database"]["user"] == "default"
|
|
||||||
# Default object was deep-copied (no same nested object identity)
|
|
||||||
assert result is not defaults
|
|
||||||
assert result["database"] is not defaults["database"]
|
|
||||||
|
|
||||||
def test_nested_type_casting(self):
|
|
||||||
"""Type casting for nested keys (dot-notation) should work."""
|
|
||||||
env_str = "database.host=db.example.com, database.port=5433, debug=false"
|
|
||||||
type_map = {"database.port": int, "debug": bool}
|
|
||||||
result = parse_dict_from_str(env_str, type_map=type_map)
|
|
||||||
|
|
||||||
assert result["database"]["host"] == "db.example.com"
|
|
||||||
assert result["database"]["port"] == 5433
|
|
||||||
assert isinstance(result["database"]["port"], int)
|
|
||||||
assert result["debug"] is False
|
|
||||||
assert isinstance(result["debug"], bool)
|
|
||||||
|
|
||||||
def test_nested_casting_error_message(self):
|
|
||||||
"""Error messages should include the full dotted key name on failure."""
|
|
||||||
env_str = "database.port=not-a-number"
|
|
||||||
type_map = {"database.port": int}
|
|
||||||
with pytest.raises(ValueError) as excinfo:
|
|
||||||
parse_dict_from_str(env_str, type_map=type_map)
|
|
||||||
|
|
||||||
msg = str(excinfo.value)
|
|
||||||
assert "Error casting key 'database.port'" in msg
|
|
||||||
assert "value 'not-a-number'" in msg
|
|
||||||
assert "to type 'int'" in msg
|
|
||||||
|
|
||||||
def test_type_map_does_not_recast_non_string_defaults(self):
|
|
||||||
"""If a default already provides a non-string value, the caster should skip it."""
|
|
||||||
defaults = {"database": {"port": 3306}}
|
|
||||||
type_map = {"database.port": int}
|
|
||||||
result = parse_dict_from_str(None, defaults=defaults, type_map=type_map)
|
|
||||||
assert result["database"]["port"] == 3306
|
|
||||||
assert isinstance(result["database"]["port"], int)
|
|
||||||
|
|
||||||
|
|
||||||
class TestGetIntFromEnv:
|
|
||||||
@pytest.mark.parametrize(
|
|
||||||
("env_value", "expected"),
|
|
||||||
[
|
|
||||||
pytest.param("42", 42, id="positive"),
|
|
||||||
pytest.param("-10", -10, id="negative"),
|
|
||||||
pytest.param("0", 0, id="zero"),
|
|
||||||
pytest.param("999", 999, id="large_positive"),
|
|
||||||
pytest.param("-999", -999, id="large_negative"),
|
|
||||||
],
|
|
||||||
)
|
|
||||||
def test_existing_env_var_valid_ints(self, mocker, env_value, expected):
|
|
||||||
"""Test that existing environment variables with valid integers return correct values."""
|
|
||||||
mocker.patch.dict(os.environ, {"INT_VAR": env_value})
|
|
||||||
assert get_int_from_env("INT_VAR") == expected
|
|
||||||
|
|
||||||
@pytest.mark.parametrize(
|
|
||||||
("default", "expected"),
|
|
||||||
[
|
|
||||||
pytest.param(100, 100, id="positive_default"),
|
|
||||||
pytest.param(0, 0, id="zero_default"),
|
|
||||||
pytest.param(-50, -50, id="negative_default"),
|
|
||||||
pytest.param(None, None, id="none_default"),
|
|
||||||
],
|
|
||||||
)
|
|
||||||
def test_missing_env_var_with_defaults(self, mocker, default, expected):
|
|
||||||
"""Test that missing environment variables return provided defaults."""
|
|
||||||
mocker.patch.dict(os.environ, {}, clear=True)
|
|
||||||
assert get_int_from_env("MISSING_VAR", default=default) == expected
|
|
||||||
|
|
||||||
def test_missing_env_var_no_default(self, mocker):
|
|
||||||
"""Test that missing environment variable with no default returns None."""
|
|
||||||
mocker.patch.dict(os.environ, {}, clear=True)
|
|
||||||
assert get_int_from_env("MISSING_VAR") is None
|
|
||||||
|
|
||||||
@pytest.mark.parametrize(
|
|
||||||
"invalid_value",
|
|
||||||
[
|
|
||||||
pytest.param("not_a_number", id="text"),
|
|
||||||
pytest.param("42.5", id="float"),
|
|
||||||
pytest.param("42a", id="alpha_suffix"),
|
|
||||||
pytest.param("", id="empty"),
|
|
||||||
pytest.param(" ", id="whitespace"),
|
|
||||||
pytest.param("true", id="boolean"),
|
|
||||||
pytest.param("1.0", id="decimal"),
|
|
||||||
],
|
|
||||||
)
|
|
||||||
def test_invalid_int_values_raise_error(self, mocker, invalid_value):
|
|
||||||
"""Test that invalid integer values raise ValueError."""
|
|
||||||
mocker.patch.dict(os.environ, {"INVALID_INT": invalid_value})
|
|
||||||
with pytest.raises(ValueError):
|
|
||||||
get_int_from_env("INVALID_INT")
|
|
||||||
|
|
||||||
|
|
||||||
class TestGetEnvChoice:
|
|
||||||
@pytest.fixture
|
|
||||||
def valid_choices(self) -> set[str]:
|
|
||||||
"""Fixture providing a set of valid environment choices."""
|
|
||||||
return {"development", "staging", "production"}
|
|
||||||
|
|
||||||
def test_returns_valid_env_value(
|
|
||||||
self,
|
|
||||||
mocker: MockerFixture,
|
|
||||||
valid_choices: set[str],
|
|
||||||
) -> None:
|
|
||||||
"""Test that function returns the environment value when it's valid."""
|
|
||||||
mocker.patch.dict("os.environ", {"TEST_ENV": "development"})
|
|
||||||
|
|
||||||
result = get_choice_from_env("TEST_ENV", valid_choices)
|
|
||||||
|
|
||||||
assert result == "development"
|
|
||||||
|
|
||||||
def test_returns_default_when_env_not_set(
|
|
||||||
self,
|
|
||||||
mocker: MockerFixture,
|
|
||||||
valid_choices: set[str],
|
|
||||||
) -> None:
|
|
||||||
"""Test that function returns default value when env var is not set."""
|
|
||||||
mocker.patch.dict("os.environ", {}, clear=True)
|
|
||||||
|
|
||||||
result = get_choice_from_env("TEST_ENV", valid_choices, default="staging")
|
|
||||||
|
|
||||||
assert result == "staging"
|
|
||||||
|
|
||||||
def test_raises_error_when_env_not_set_and_no_default(
|
|
||||||
self,
|
|
||||||
mocker: MockerFixture,
|
|
||||||
valid_choices: set[str],
|
|
||||||
) -> None:
|
|
||||||
"""Test that function raises ValueError when env var is missing and no default."""
|
|
||||||
mocker.patch.dict("os.environ", {}, clear=True)
|
|
||||||
|
|
||||||
with pytest.raises(ValueError) as exc_info:
|
|
||||||
get_choice_from_env("TEST_ENV", valid_choices)
|
|
||||||
|
|
||||||
assert "Environment variable 'TEST_ENV' is required but not set" in str(
|
|
||||||
exc_info.value,
|
|
||||||
)
|
|
||||||
|
|
||||||
def test_raises_error_when_env_value_invalid(
|
|
||||||
self,
|
|
||||||
mocker: MockerFixture,
|
|
||||||
valid_choices: set[str],
|
|
||||||
) -> None:
|
|
||||||
"""Test that function raises ValueError when env value is not in choices."""
|
|
||||||
mocker.patch.dict("os.environ", {"TEST_ENV": "invalid_value"})
|
|
||||||
|
|
||||||
with pytest.raises(ValueError) as exc_info:
|
|
||||||
get_choice_from_env("TEST_ENV", valid_choices)
|
|
||||||
|
|
||||||
error_msg = str(exc_info.value)
|
|
||||||
assert (
|
|
||||||
"Environment variable 'TEST_ENV' has invalid value 'invalid_value'"
|
|
||||||
in error_msg
|
|
||||||
)
|
|
||||||
assert "Valid choices are:" in error_msg
|
|
||||||
assert "development" in error_msg
|
|
||||||
assert "staging" in error_msg
|
|
||||||
assert "production" in error_msg
|
|
||||||
|
|
||||||
def test_raises_error_when_default_invalid(
|
|
||||||
self,
|
|
||||||
mocker: MockerFixture,
|
|
||||||
valid_choices: set[str],
|
|
||||||
) -> None:
|
|
||||||
"""Test that function raises ValueError when default value is not in choices."""
|
|
||||||
mocker.patch.dict("os.environ", {}, clear=True)
|
|
||||||
|
|
||||||
with pytest.raises(ValueError) as exc_info:
|
|
||||||
get_choice_from_env("TEST_ENV", valid_choices, default="invalid_default")
|
|
||||||
|
|
||||||
error_msg = str(exc_info.value)
|
|
||||||
assert (
|
|
||||||
"Environment variable 'TEST_ENV' has invalid value 'invalid_default'"
|
|
||||||
in error_msg
|
|
||||||
)
|
|
||||||
|
|
||||||
def test_case_sensitive_validation(
|
|
||||||
self,
|
|
||||||
mocker: MockerFixture,
|
|
||||||
valid_choices: set[str],
|
|
||||||
) -> None:
|
|
||||||
"""Test that validation is case sensitive."""
|
|
||||||
mocker.patch.dict("os.environ", {"TEST_ENV": "DEVELOPMENT"})
|
|
||||||
|
|
||||||
with pytest.raises(ValueError):
|
|
||||||
get_choice_from_env("TEST_ENV", valid_choices)
|
|
||||||
|
|
||||||
def test_empty_string_env_value(
|
|
||||||
self,
|
|
||||||
mocker: MockerFixture,
|
|
||||||
valid_choices: set[str],
|
|
||||||
) -> None:
|
|
||||||
"""Test behavior with empty string environment value."""
|
|
||||||
mocker.patch.dict("os.environ", {"TEST_ENV": ""})
|
|
||||||
|
|
||||||
with pytest.raises(ValueError) as exc_info:
|
|
||||||
get_choice_from_env("TEST_ENV", valid_choices)
|
|
||||||
|
|
||||||
assert "has invalid value ''" in str(exc_info.value)
|
|
||||||
|
|
||||||
def test_whitespace_env_value(
|
|
||||||
self,
|
|
||||||
mocker: MockerFixture,
|
|
||||||
valid_choices: set[str],
|
|
||||||
) -> None:
|
|
||||||
"""Test behavior with whitespace-only environment value."""
|
|
||||||
mocker.patch.dict("os.environ", {"TEST_ENV": " development "})
|
|
||||||
|
|
||||||
with pytest.raises(ValueError):
|
|
||||||
get_choice_from_env("TEST_ENV", valid_choices)
|
|
||||||
|
|
||||||
def test_single_choice_set(self, mocker: MockerFixture) -> None:
|
|
||||||
"""Test function works correctly with single choice set."""
|
|
||||||
single_choice: set[str] = {"production"}
|
|
||||||
mocker.patch.dict("os.environ", {"TEST_ENV": "production"})
|
|
||||||
|
|
||||||
result = get_choice_from_env("TEST_ENV", single_choice)
|
|
||||||
|
|
||||||
assert result == "production"
|
|
||||||
|
|
||||||
def test_large_choice_set(self, mocker: MockerFixture) -> None:
|
|
||||||
"""Test function works correctly with large choice set."""
|
|
||||||
large_choices: set[str] = {f"option_{i}" for i in range(100)}
|
|
||||||
mocker.patch.dict("os.environ", {"TEST_ENV": "option_50"})
|
|
||||||
|
|
||||||
result = get_choice_from_env("TEST_ENV", large_choices)
|
|
||||||
|
|
||||||
assert result == "option_50"
|
|
||||||
|
|
||||||
def test_different_env_keys(
|
|
||||||
self,
|
|
||||||
mocker: MockerFixture,
|
|
||||||
valid_choices: set[str],
|
|
||||||
) -> None:
|
|
||||||
"""Test function works with different environment variable keys."""
|
|
||||||
test_cases = [
|
|
||||||
("DJANGO_ENV", "development"),
|
|
||||||
("DATABASE_BACKEND", "staging"),
|
|
||||||
("LOG_LEVEL", "production"),
|
|
||||||
("APP_MODE", "development"),
|
|
||||||
]
|
|
||||||
|
|
||||||
for env_key, env_value in test_cases:
|
|
||||||
mocker.patch.dict("os.environ", {env_key: env_value})
|
|
||||||
result = get_choice_from_env(env_key, valid_choices)
|
|
||||||
assert result == env_value
|
|
||||||
@@ -78,15 +78,11 @@ class TestCustomAccountAdapter(TestCase):
|
|||||||
adapter = get_adapter()
|
adapter = get_adapter()
|
||||||
|
|
||||||
# Test when PAPERLESS_URL is None
|
# Test when PAPERLESS_URL is None
|
||||||
with override_settings(
|
expected_url = f"https://foo.org{reverse('account_reset_password_from_key', kwargs={'uidb36': 'UID', 'key': 'KEY'})}"
|
||||||
PAPERLESS_URL=None,
|
self.assertEqual(
|
||||||
ACCOUNT_DEFAULT_HTTP_PROTOCOL="https",
|
adapter.get_reset_password_from_key_url("UID-KEY"),
|
||||||
):
|
expected_url,
|
||||||
expected_url = f"https://foo.org{reverse('account_reset_password_from_key', kwargs={'uidb36': 'UID', 'key': 'KEY'})}"
|
)
|
||||||
self.assertEqual(
|
|
||||||
adapter.get_reset_password_from_key_url("UID-KEY"),
|
|
||||||
expected_url,
|
|
||||||
)
|
|
||||||
|
|
||||||
# Test when PAPERLESS_URL is not None
|
# Test when PAPERLESS_URL is not None
|
||||||
with override_settings(PAPERLESS_URL="https://bar.com"):
|
with override_settings(PAPERLESS_URL="https://bar.com"):
|
||||||
|
|||||||
@@ -2,17 +2,13 @@ import os
|
|||||||
from pathlib import Path
|
from pathlib import Path
|
||||||
from unittest import mock
|
from unittest import mock
|
||||||
|
|
||||||
import pytest
|
|
||||||
from django.core.checks import Warning
|
|
||||||
from django.test import TestCase
|
from django.test import TestCase
|
||||||
from django.test import override_settings
|
from django.test import override_settings
|
||||||
from pytest_mock import MockerFixture
|
|
||||||
|
|
||||||
from documents.tests.utils import DirectoriesMixin
|
from documents.tests.utils import DirectoriesMixin
|
||||||
from documents.tests.utils import FileSystemAssertsMixin
|
from documents.tests.utils import FileSystemAssertsMixin
|
||||||
from paperless.checks import audit_log_check
|
from paperless.checks import audit_log_check
|
||||||
from paperless.checks import binaries_check
|
from paperless.checks import binaries_check
|
||||||
from paperless.checks import check_deprecated_db_settings
|
|
||||||
from paperless.checks import debug_mode_check
|
from paperless.checks import debug_mode_check
|
||||||
from paperless.checks import paths_check
|
from paperless.checks import paths_check
|
||||||
from paperless.checks import settings_values_check
|
from paperless.checks import settings_values_check
|
||||||
@@ -241,157 +237,3 @@ class TestAuditLogChecks(TestCase):
|
|||||||
("auditlog table was found but audit log is disabled."),
|
("auditlog table was found but audit log is disabled."),
|
||||||
msg.msg,
|
msg.msg,
|
||||||
)
|
)
|
||||||
|
|
||||||
|
|
||||||
DEPRECATED_VARS: dict[str, str] = {
|
|
||||||
"PAPERLESS_DB_TIMEOUT": "timeout",
|
|
||||||
"PAPERLESS_DB_POOLSIZE": "pool.min_size / pool.max_size",
|
|
||||||
"PAPERLESS_DBSSLMODE": "sslmode",
|
|
||||||
"PAPERLESS_DBSSLROOTCERT": "sslrootcert",
|
|
||||||
"PAPERLESS_DBSSLCERT": "sslcert",
|
|
||||||
"PAPERLESS_DBSSLKEY": "sslkey",
|
|
||||||
}
|
|
||||||
|
|
||||||
|
|
||||||
class TestDeprecatedDbSettings:
|
|
||||||
"""Test suite for the check_deprecated_db_settings system check."""
|
|
||||||
|
|
||||||
def test_no_deprecated_vars_returns_empty(
|
|
||||||
self,
|
|
||||||
mocker: MockerFixture,
|
|
||||||
) -> None:
|
|
||||||
"""No warnings when none of the deprecated vars are present."""
|
|
||||||
# clear=True ensures vars from the outer test environment do not leak in
|
|
||||||
mocker.patch.dict(os.environ, {}, clear=True)
|
|
||||||
result = check_deprecated_db_settings(None)
|
|
||||||
assert result == []
|
|
||||||
|
|
||||||
@pytest.mark.parametrize(
|
|
||||||
("env_var", "db_option_key"),
|
|
||||||
[
|
|
||||||
("PAPERLESS_DB_TIMEOUT", "timeout"),
|
|
||||||
("PAPERLESS_DB_POOLSIZE", "pool.min_size / pool.max_size"),
|
|
||||||
("PAPERLESS_DBSSLMODE", "sslmode"),
|
|
||||||
("PAPERLESS_DBSSLROOTCERT", "sslrootcert"),
|
|
||||||
("PAPERLESS_DBSSLCERT", "sslcert"),
|
|
||||||
("PAPERLESS_DBSSLKEY", "sslkey"),
|
|
||||||
],
|
|
||||||
ids=[
|
|
||||||
"db-timeout",
|
|
||||||
"db-poolsize",
|
|
||||||
"ssl-mode",
|
|
||||||
"ssl-rootcert",
|
|
||||||
"ssl-cert",
|
|
||||||
"ssl-key",
|
|
||||||
],
|
|
||||||
)
|
|
||||||
def test_single_deprecated_var_produces_one_warning(
|
|
||||||
self,
|
|
||||||
mocker: MockerFixture,
|
|
||||||
env_var: str,
|
|
||||||
db_option_key: str,
|
|
||||||
) -> None:
|
|
||||||
"""Each deprecated var in isolation produces exactly one warning."""
|
|
||||||
mocker.patch.dict(os.environ, {env_var: "some_value"}, clear=True)
|
|
||||||
result = check_deprecated_db_settings(None)
|
|
||||||
|
|
||||||
assert len(result) == 1
|
|
||||||
warning = result[0]
|
|
||||||
assert isinstance(warning, Warning)
|
|
||||||
assert warning.id == "paperless.W001"
|
|
||||||
assert env_var in warning.hint
|
|
||||||
assert db_option_key in warning.hint
|
|
||||||
|
|
||||||
def test_multiple_deprecated_vars_produce_one_warning_each(
|
|
||||||
self,
|
|
||||||
mocker: MockerFixture,
|
|
||||||
) -> None:
|
|
||||||
"""Each deprecated var present in the environment gets its own warning."""
|
|
||||||
set_vars = {
|
|
||||||
"PAPERLESS_DB_TIMEOUT": "30",
|
|
||||||
"PAPERLESS_DB_POOLSIZE": "10",
|
|
||||||
"PAPERLESS_DBSSLMODE": "require",
|
|
||||||
}
|
|
||||||
mocker.patch.dict(os.environ, set_vars, clear=True)
|
|
||||||
result = check_deprecated_db_settings(None)
|
|
||||||
|
|
||||||
assert len(result) == len(set_vars)
|
|
||||||
assert all(isinstance(w, Warning) for w in result)
|
|
||||||
assert all(w.id == "paperless.W001" for w in result)
|
|
||||||
all_hints = " ".join(w.hint for w in result)
|
|
||||||
for var_name in set_vars:
|
|
||||||
assert var_name in all_hints
|
|
||||||
|
|
||||||
def test_all_deprecated_vars_produces_one_warning_each(
|
|
||||||
self,
|
|
||||||
mocker: MockerFixture,
|
|
||||||
) -> None:
|
|
||||||
"""All deprecated vars set simultaneously produces one warning per var."""
|
|
||||||
all_vars = dict.fromkeys(DEPRECATED_VARS, "some_value")
|
|
||||||
mocker.patch.dict(os.environ, all_vars, clear=True)
|
|
||||||
result = check_deprecated_db_settings(None)
|
|
||||||
|
|
||||||
assert len(result) == len(DEPRECATED_VARS)
|
|
||||||
assert all(isinstance(w, Warning) for w in result)
|
|
||||||
assert all(w.id == "paperless.W001" for w in result)
|
|
||||||
|
|
||||||
def test_unset_vars_not_mentioned_in_warnings(
|
|
||||||
self,
|
|
||||||
mocker: MockerFixture,
|
|
||||||
) -> None:
|
|
||||||
"""Vars absent from the environment do not appear in any warning."""
|
|
||||||
mocker.patch.dict(
|
|
||||||
os.environ,
|
|
||||||
{"PAPERLESS_DB_TIMEOUT": "30"},
|
|
||||||
clear=True,
|
|
||||||
)
|
|
||||||
result = check_deprecated_db_settings(None)
|
|
||||||
|
|
||||||
assert len(result) == 1
|
|
||||||
assert "PAPERLESS_DB_TIMEOUT" in result[0].hint
|
|
||||||
unset_vars = [v for v in DEPRECATED_VARS if v != "PAPERLESS_DB_TIMEOUT"]
|
|
||||||
for var_name in unset_vars:
|
|
||||||
assert var_name not in result[0].hint
|
|
||||||
|
|
||||||
def test_empty_string_var_not_treated_as_set(
|
|
||||||
self,
|
|
||||||
mocker: MockerFixture,
|
|
||||||
) -> None:
|
|
||||||
"""A var set to an empty string is not flagged as a deprecated setting."""
|
|
||||||
mocker.patch.dict(
|
|
||||||
os.environ,
|
|
||||||
{"PAPERLESS_DB_TIMEOUT": ""},
|
|
||||||
clear=True,
|
|
||||||
)
|
|
||||||
result = check_deprecated_db_settings(None)
|
|
||||||
assert result == []
|
|
||||||
|
|
||||||
def test_warning_mentions_migration_target(
|
|
||||||
self,
|
|
||||||
mocker: MockerFixture,
|
|
||||||
) -> None:
|
|
||||||
"""Each warning hints at PAPERLESS_DB_OPTIONS as the migration target."""
|
|
||||||
mocker.patch.dict(
|
|
||||||
os.environ,
|
|
||||||
{"PAPERLESS_DBSSLMODE": "require"},
|
|
||||||
clear=True,
|
|
||||||
)
|
|
||||||
result = check_deprecated_db_settings(None)
|
|
||||||
|
|
||||||
assert len(result) == 1
|
|
||||||
assert "PAPERLESS_DB_OPTIONS" in result[0].hint
|
|
||||||
|
|
||||||
def test_warning_message_identifies_var(
|
|
||||||
self,
|
|
||||||
mocker: MockerFixture,
|
|
||||||
) -> None:
|
|
||||||
"""The warning message (not just the hint) identifies the offending var."""
|
|
||||||
mocker.patch.dict(
|
|
||||||
os.environ,
|
|
||||||
{"PAPERLESS_DBSSLCERT": "/path/to/cert.pem"},
|
|
||||||
clear=True,
|
|
||||||
)
|
|
||||||
result = check_deprecated_db_settings(None)
|
|
||||||
|
|
||||||
assert len(result) == 1
|
|
||||||
assert "PAPERLESS_DBSSLCERT" in result[0].msg
|
|
||||||
|
|||||||
@@ -9,6 +9,7 @@ from celery.schedules import crontab
|
|||||||
from paperless.settings import _parse_base_paths
|
from paperless.settings import _parse_base_paths
|
||||||
from paperless.settings import _parse_beat_schedule
|
from paperless.settings import _parse_beat_schedule
|
||||||
from paperless.settings import _parse_dateparser_languages
|
from paperless.settings import _parse_dateparser_languages
|
||||||
|
from paperless.settings import _parse_db_settings
|
||||||
from paperless.settings import _parse_ignore_dates
|
from paperless.settings import _parse_ignore_dates
|
||||||
from paperless.settings import _parse_paperless_url
|
from paperless.settings import _parse_paperless_url
|
||||||
from paperless.settings import _parse_redis_url
|
from paperless.settings import _parse_redis_url
|
||||||
@@ -377,6 +378,64 @@ class TestCeleryScheduleParsing(TestCase):
|
|||||||
)
|
)
|
||||||
|
|
||||||
|
|
||||||
|
class TestDBSettings(TestCase):
|
||||||
|
def test_db_timeout_with_sqlite(self) -> None:
|
||||||
|
"""
|
||||||
|
GIVEN:
|
||||||
|
- PAPERLESS_DB_TIMEOUT is set
|
||||||
|
WHEN:
|
||||||
|
- Settings are parsed
|
||||||
|
THEN:
|
||||||
|
- PAPERLESS_DB_TIMEOUT set for sqlite
|
||||||
|
"""
|
||||||
|
with mock.patch.dict(
|
||||||
|
os.environ,
|
||||||
|
{
|
||||||
|
"PAPERLESS_DB_TIMEOUT": "10",
|
||||||
|
},
|
||||||
|
):
|
||||||
|
databases = _parse_db_settings()
|
||||||
|
|
||||||
|
self.assertDictEqual(
|
||||||
|
{
|
||||||
|
"timeout": 10.0,
|
||||||
|
},
|
||||||
|
databases["default"]["OPTIONS"],
|
||||||
|
)
|
||||||
|
|
||||||
|
def test_db_timeout_with_not_sqlite(self) -> None:
|
||||||
|
"""
|
||||||
|
GIVEN:
|
||||||
|
- PAPERLESS_DB_TIMEOUT is set but db is not sqlite
|
||||||
|
WHEN:
|
||||||
|
- Settings are parsed
|
||||||
|
THEN:
|
||||||
|
- PAPERLESS_DB_TIMEOUT set correctly in non-sqlite db & for fallback sqlite db
|
||||||
|
"""
|
||||||
|
with mock.patch.dict(
|
||||||
|
os.environ,
|
||||||
|
{
|
||||||
|
"PAPERLESS_DBHOST": "127.0.0.1",
|
||||||
|
"PAPERLESS_DB_TIMEOUT": "10",
|
||||||
|
},
|
||||||
|
):
|
||||||
|
databases = _parse_db_settings()
|
||||||
|
|
||||||
|
self.assertDictEqual(
|
||||||
|
databases["default"]["OPTIONS"],
|
||||||
|
databases["default"]["OPTIONS"]
|
||||||
|
| {
|
||||||
|
"connect_timeout": 10.0,
|
||||||
|
},
|
||||||
|
)
|
||||||
|
self.assertDictEqual(
|
||||||
|
{
|
||||||
|
"timeout": 10.0,
|
||||||
|
},
|
||||||
|
databases["sqlite"]["OPTIONS"],
|
||||||
|
)
|
||||||
|
|
||||||
|
|
||||||
class TestPaperlessURLSettings(TestCase):
|
class TestPaperlessURLSettings(TestCase):
|
||||||
def test_paperless_url(self) -> None:
|
def test_paperless_url(self) -> None:
|
||||||
"""
|
"""
|
||||||
|
|||||||
@@ -1,7 +1,7 @@
|
|||||||
import tempfile
|
import tempfile
|
||||||
from pathlib import Path
|
from pathlib import Path
|
||||||
|
|
||||||
from django.test import override_settings
|
from django.conf import settings
|
||||||
|
|
||||||
|
|
||||||
def test_favicon_view(client):
|
def test_favicon_view(client):
|
||||||
@@ -11,14 +11,15 @@ def test_favicon_view(client):
|
|||||||
favicon_path.parent.mkdir(parents=True, exist_ok=True)
|
favicon_path.parent.mkdir(parents=True, exist_ok=True)
|
||||||
favicon_path.write_bytes(b"FAKE ICON DATA")
|
favicon_path.write_bytes(b"FAKE ICON DATA")
|
||||||
|
|
||||||
with override_settings(STATIC_ROOT=static_dir):
|
settings.STATIC_ROOT = static_dir
|
||||||
response = client.get("/favicon.ico")
|
|
||||||
assert response.status_code == 200
|
response = client.get("/favicon.ico")
|
||||||
assert response["Content-Type"] == "image/x-icon"
|
assert response.status_code == 200
|
||||||
assert b"".join(response.streaming_content) == b"FAKE ICON DATA"
|
assert response["Content-Type"] == "image/x-icon"
|
||||||
|
assert b"".join(response.streaming_content) == b"FAKE ICON DATA"
|
||||||
|
|
||||||
|
|
||||||
def test_favicon_view_missing_file(client):
|
def test_favicon_view_missing_file(client):
|
||||||
with override_settings(STATIC_ROOT=Path(tempfile.mkdtemp())):
|
settings.STATIC_ROOT = Path(tempfile.mkdtemp())
|
||||||
response = client.get("/favicon.ico")
|
response = client.get("/favicon.ico")
|
||||||
assert response.status_code == 404
|
assert response.status_code == 404
|
||||||
|
|||||||
@@ -1,6 +1,6 @@
|
|||||||
from typing import Final
|
from typing import Final
|
||||||
|
|
||||||
__version__: Final[tuple[int, int, int]] = (2, 20, 9)
|
__version__: Final[tuple[int, int, int]] = (2, 20, 8)
|
||||||
# Version string like X.Y.Z
|
# Version string like X.Y.Z
|
||||||
__full_version_str__: Final[str] = ".".join(map(str, __version__))
|
__full_version_str__: Final[str] = ".".join(map(str, __version__))
|
||||||
# Version string like X.Y
|
# Version string like X.Y
|
||||||
|
|||||||
@@ -5,7 +5,6 @@ from pathlib import Path
|
|||||||
from bleach import clean
|
from bleach import clean
|
||||||
from bleach import linkify
|
from bleach import linkify
|
||||||
from django.conf import settings
|
from django.conf import settings
|
||||||
from django.utils import timezone
|
|
||||||
from django.utils.timezone import is_naive
|
from django.utils.timezone import is_naive
|
||||||
from django.utils.timezone import make_aware
|
from django.utils.timezone import make_aware
|
||||||
from gotenberg_client import GotenbergClient
|
from gotenberg_client import GotenbergClient
|
||||||
@@ -333,9 +332,7 @@ class MailDocumentParser(DocumentParser):
|
|||||||
if data["attachments"]:
|
if data["attachments"]:
|
||||||
data["attachments_label"] = "Attachments"
|
data["attachments_label"] = "Attachments"
|
||||||
|
|
||||||
data["date"] = clean_html(
|
data["date"] = clean_html(mail.date.astimezone().strftime("%Y-%m-%d %H:%M"))
|
||||||
timezone.localtime(mail.date).strftime("%Y-%m-%d %H:%M"),
|
|
||||||
)
|
|
||||||
data["content"] = clean_html(mail.text.strip())
|
data["content"] = clean_html(mail.text.strip())
|
||||||
|
|
||||||
from django.template.loader import render_to_string
|
from django.template.loader import render_to_string
|
||||||
|
|||||||
1878
src/paperless_mail/templates/package-lock.json
generated
1878
src/paperless_mail/templates/package-lock.json
generated
File diff suppressed because it is too large
Load Diff
@@ -1,5 +1,5 @@
|
|||||||
{
|
{
|
||||||
"devDependencies": {
|
"devDependencies": {
|
||||||
"tailwindcss": "^3.4.19"
|
"tailwindcss": "^3.4.4"
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|||||||
@@ -6,7 +6,6 @@ from unittest import mock
|
|||||||
import httpx
|
import httpx
|
||||||
import pytest
|
import pytest
|
||||||
from django.test.html import parse_html
|
from django.test.html import parse_html
|
||||||
from django.utils import timezone
|
|
||||||
from pytest_django.fixtures import SettingsWrapper
|
from pytest_django.fixtures import SettingsWrapper
|
||||||
from pytest_httpx import HTTPXMock
|
from pytest_httpx import HTTPXMock
|
||||||
from pytest_mock import MockerFixture
|
from pytest_mock import MockerFixture
|
||||||
@@ -635,14 +634,13 @@ class TestParser:
|
|||||||
THEN:
|
THEN:
|
||||||
- Resulting HTML is as expected
|
- Resulting HTML is as expected
|
||||||
"""
|
"""
|
||||||
with timezone.override("UTC"):
|
mail = mail_parser.parse_file_to_message(html_email_file)
|
||||||
mail = mail_parser.parse_file_to_message(html_email_file)
|
html_file = mail_parser.mail_to_html(mail)
|
||||||
html_file = mail_parser.mail_to_html(mail)
|
|
||||||
|
|
||||||
expected_html = parse_html(html_email_html_file.read_text())
|
expected_html = parse_html(html_email_html_file.read_text())
|
||||||
actual_html = parse_html(html_file.read_text())
|
actual_html = parse_html(html_file.read_text())
|
||||||
|
|
||||||
assert expected_html == actual_html
|
assert expected_html == actual_html
|
||||||
|
|
||||||
def test_generate_pdf_from_mail(
|
def test_generate_pdf_from_mail(
|
||||||
self,
|
self,
|
||||||
|
|||||||
@@ -1,6 +1,5 @@
|
|||||||
import shutil
|
import shutil
|
||||||
import tempfile
|
import tempfile
|
||||||
import unicodedata
|
|
||||||
import uuid
|
import uuid
|
||||||
from pathlib import Path
|
from pathlib import Path
|
||||||
from unittest import mock
|
from unittest import mock
|
||||||
@@ -848,18 +847,8 @@ class TestParser(DirectoriesMixin, FileSystemAssertsMixin, TestCase):
|
|||||||
"application/pdf",
|
"application/pdf",
|
||||||
)
|
)
|
||||||
|
|
||||||
# OCR output for RTL text varies across platforms/versions due to
|
# Copied from the PDF to here. Don't even look at it
|
||||||
# bidi controls and presentation forms; normalize before assertion.
|
self.assertIn("ةﯾﻠﺧﺎدﻻ ةرازو", parser.get_text())
|
||||||
normalized_text = "".join(
|
|
||||||
char
|
|
||||||
for char in unicodedata.normalize("NFKC", parser.get_text())
|
|
||||||
if unicodedata.category(char) != "Cf" and not char.isspace()
|
|
||||||
)
|
|
||||||
|
|
||||||
self.assertIn("ةرازو", normalized_text)
|
|
||||||
self.assertTrue(
|
|
||||||
any(token in normalized_text for token in ("ةیلخادلا", "الاخليد")),
|
|
||||||
)
|
|
||||||
|
|
||||||
@mock.patch("ocrmypdf.ocr")
|
@mock.patch("ocrmypdf.ocr")
|
||||||
def test_gs_rendering_error(self, m) -> None:
|
def test_gs_rendering_error(self, m) -> None:
|
||||||
|
|||||||
@@ -18,10 +18,7 @@ nav = [
|
|||||||
"setup.md",
|
"setup.md",
|
||||||
"usage.md",
|
"usage.md",
|
||||||
"configuration.md",
|
"configuration.md",
|
||||||
{ Administration = [
|
"administration.md",
|
||||||
"administration.md",
|
|
||||||
{ "v3 Migration Guide" = "migration-v3.md" },
|
|
||||||
] },
|
|
||||||
"advanced_usage.md",
|
"advanced_usage.md",
|
||||||
"api.md",
|
"api.md",
|
||||||
"development.md",
|
"development.md",
|
||||||
|
|||||||
Reference in New Issue
Block a user