Mirror of https://github.com/paperless-ngx/paperless-ngx.git (synced 2026-03-09 18:51:25 +00:00)
Compare commits

24 commits (feature-sh...fix-versio):

- d9628f7255
- fcbe4b200c
- 2b434916a0
- d85ee29976
- 0c7d56c5e7
- 0bcf904e3a
- bcc2f11152
- e18b1fd99d
- e30676f889
- 2a28549c5a
- 4badf0e7c2
- bc26d94593
- 93cbbf34b7
- 1e8622494d
- 0c3298f030
- 2b288c094d
- 2cdb1424ef
- f5c0c21922
- 91ddda9256
- 9d5e618de8
- 50ae49c7da
- ba023ef332
- 7345f2e81c
- 731448a8f9
.github/workflows/ci-backend.yml (vendored, 86 changed lines)
```diff
@@ -3,21 +3,9 @@ on:
   push:
     branches-ignore:
       - 'translations**'
-    paths:
-      - 'src/**'
-      - 'pyproject.toml'
-      - 'uv.lock'
-      - 'docker/compose/docker-compose.ci-test.yml'
-      - '.github/workflows/ci-backend.yml'
   pull_request:
     branches-ignore:
       - 'translations**'
-    paths:
-      - 'src/**'
-      - 'pyproject.toml'
-      - 'uv.lock'
-      - 'docker/compose/docker-compose.ci-test.yml'
-      - '.github/workflows/ci-backend.yml'
   workflow_dispatch:
 concurrency:
   group: backend-${{ github.event.pull_request.number || github.ref }}
@@ -26,7 +14,55 @@ env:
   DEFAULT_UV_VERSION: "0.10.x"
   NLTK_DATA: "/usr/share/nltk_data"
 jobs:
+  changes:
+    name: Detect Backend Changes
+    runs-on: ubuntu-slim
+    outputs:
+      backend_changed: ${{ steps.force.outputs.run_all == 'true' || steps.filter.outputs.backend == 'true' }}
+    steps:
+      - name: Checkout
+        uses: actions/checkout@v6.0.2
+        with:
+          fetch-depth: 0
+      - name: Decide run mode
+        id: force
+        run: |
+          if [[ "${{ github.event_name }}" == "workflow_dispatch" ]]; then
+            echo "run_all=true" >> "$GITHUB_OUTPUT"
+          elif [[ "${{ github.event_name }}" == "push" && ( "${{ github.ref_name }}" == "main" || "${{ github.ref_name }}" == "dev" ) ]]; then
+            echo "run_all=true" >> "$GITHUB_OUTPUT"
+          else
+            echo "run_all=false" >> "$GITHUB_OUTPUT"
+          fi
+      - name: Set diff range
+        id: range
+        if: steps.force.outputs.run_all != 'true'
+        run: |
+          if [[ "${{ github.event_name }}" == "pull_request" ]]; then
+            echo "base=${{ github.event.pull_request.base.sha }}" >> "$GITHUB_OUTPUT"
+          elif [[ "${{ github.event.created }}" == "true" ]]; then
+            echo "base=${{ github.event.repository.default_branch }}" >> "$GITHUB_OUTPUT"
+          else
+            echo "base=${{ github.event.before }}" >> "$GITHUB_OUTPUT"
+          fi
+          echo "ref=${{ github.sha }}" >> "$GITHUB_OUTPUT"
+      - name: Detect changes
+        id: filter
+        if: steps.force.outputs.run_all != 'true'
+        uses: dorny/paths-filter@v3.0.2
+        with:
+          base: ${{ steps.range.outputs.base }}
+          ref: ${{ steps.range.outputs.ref }}
+          filters: |
+            backend:
+              - 'src/**'
+              - 'pyproject.toml'
+              - 'uv.lock'
+              - 'docker/compose/docker-compose.ci-test.yml'
+              - '.github/workflows/ci-backend.yml'
   test:
+    needs: changes
+    if: needs.changes.outputs.backend_changed == 'true'
     name: "Python ${{ matrix.python-version }}"
     runs-on: ubuntu-24.04
     strategy:
@@ -100,6 +136,8 @@ jobs:
           docker compose --file docker/compose/docker-compose.ci-test.yml logs
           docker compose --file docker/compose/docker-compose.ci-test.yml down
   typing:
+    needs: changes
+    if: needs.changes.outputs.backend_changed == 'true'
     name: Check project typing
     runs-on: ubuntu-24.04
     env:
@@ -150,3 +188,27 @@ jobs:
             --show-error-codes \
             --warn-unused-configs \
             src/ | uv run mypy-baseline filter
+  gate:
+    name: Backend CI Gate
+    needs: [changes, test, typing]
+    if: always()
+    runs-on: ubuntu-slim
+    steps:
+      - name: Check gate
+        run: |
+          if [[ "${{ needs.changes.outputs.backend_changed }}" != "true" ]]; then
+            echo "No backend-relevant changes detected."
+            exit 0
+          fi
+
+          if [[ "${{ needs.test.result }}" != "success" ]]; then
+            echo "::error::Backend test job result: ${{ needs.test.result }}"
+            exit 1
+          fi
+
+          if [[ "${{ needs.typing.result }}" != "success" ]]; then
+            echo "::error::Backend typing job result: ${{ needs.typing.result }}"
+            exit 1
+          fi
+
+          echo "Backend checks passed."
```
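All three CI workflows in this comparison embed the same `Decide run mode` and `Set diff range` shell steps. A minimal Python sketch of those rules, for reference only (requires Python 3.10+; the `Event` type and its field names are illustrative stand-ins for the `github.*` context values used above, not part of the repository):

```python
# Sketch of the run-mode / diff-base rules the new `changes` jobs implement
# in shell. `Event` is a hypothetical stand-in for the GitHub event context.
from dataclasses import dataclass


@dataclass
class Event:
    name: str                      # "push", "pull_request", or "workflow_dispatch"
    ref_name: str = ""             # branch name for push events
    pr_base_sha: str | None = None
    created: bool = False          # push created the branch
    before: str | None = None      # SHA of the previous head before the push
    default_branch: str = "dev"


def run_all(event: Event) -> bool:
    # Manual dispatches and pushes to main/dev always run the full suite.
    return event.name == "workflow_dispatch" or (
        event.name == "push" and event.ref_name in ("main", "dev")
    )


def diff_base(event: Event) -> str | None:
    # Only consulted when run_all() is False.
    if event.name == "pull_request":
        return event.pr_base_sha   # compare against the PR base commit
    if event.created:
        return event.default_branch  # new branch: compare against default
    return event.before            # ordinary push: previous head
```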
.github/workflows/ci-docker.yml (vendored, 12 changed lines)
```diff
@@ -149,15 +149,16 @@ jobs:
           mkdir -p /tmp/digests
           digest="${{ steps.build.outputs.digest }}"
           echo "digest=${digest}"
-          touch "/tmp/digests/${digest#sha256:}"
+          echo "${digest}" > "/tmp/digests/digest-${{ matrix.arch }}.txt"
       - name: Upload digest
         if: steps.check-push.outputs.should-push == 'true'
         uses: actions/upload-artifact@v7.0.0
         with:
           name: digests-${{ matrix.arch }}
-          path: /tmp/digests/*
+          path: /tmp/digests/digest-${{ matrix.arch }}.txt
           if-no-files-found: error
           retention-days: 1
+          archive: false
   merge-and-push:
     name: Merge and Push Manifest
     runs-on: ubuntu-24.04
@@ -171,7 +172,7 @@ jobs:
         uses: actions/download-artifact@v8.0.0
         with:
           path: /tmp/digests
-          pattern: digests-*
+          pattern: digest-*.txt
           merge-multiple: true
       - name: List digests
         run: |
@@ -217,8 +218,9 @@ jobs:
           tags=$(jq -cr '.tags | map("-t " + .) | join(" ")' <<< "${DOCKER_METADATA_OUTPUT_JSON}")

           digests=""
-          for digest in *; do
-            digests+="${{ env.REGISTRY }}/${REPOSITORY}@sha256:${digest} "
+          for digest_file in digest-*.txt; do
+            digest=$(cat "${digest_file}")
+            digests+="${{ env.REGISTRY }}/${REPOSITORY}@${digest} "
           done

           echo "Creating manifest with tags: ${tags}"
```
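The digest change above stops encoding each image digest in an artifact *filename* (`touch "/tmp/digests/${digest#sha256:}"`) and instead writes the full `sha256:`-prefixed string into a per-architecture text file that the merge job reads back. A rough Python sketch of the merge job's new collection loop, with placeholder registry and repository values (this is not the workflow's actual script):

```python
# Sketch of the manifest-reference collection the new shell loop performs;
# REGISTRY/REPOSITORY values are placeholders.
from pathlib import Path

REGISTRY = "ghcr.io"
REPOSITORY = "paperless-ngx/paperless-ngx"


def manifest_refs(digest_dir: Path) -> list[str]:
    refs = []
    for digest_file in sorted(digest_dir.glob("digest-*.txt")):
        digest = digest_file.read_text().strip()  # e.g. "sha256:ab12..."
        # The full digest string survives intact, so no prefix reconstruction
        # is needed when building the manifest references.
        refs.append(f"{REGISTRY}/{REPOSITORY}@{digest}")
    return refs
```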
.github/workflows/ci-docs.yml (vendored, 88 changed lines)
```diff
@@ -1,22 +1,9 @@
 name: Documentation
 on:
   push:
-    branches:
-      - main
-      - dev
-    paths:
-      - 'docs/**'
-      - 'zensical.toml'
-      - 'pyproject.toml'
-      - 'uv.lock'
-      - '.github/workflows/ci-docs.yml'
+    branches-ignore:
+      - 'translations**'
   pull_request:
-    paths:
-      - 'docs/**'
-      - 'zensical.toml'
-      - 'pyproject.toml'
-      - 'uv.lock'
-      - '.github/workflows/ci-docs.yml'
   workflow_dispatch:
 concurrency:
   group: docs-${{ github.event.pull_request.number || github.ref }}
@@ -29,7 +16,55 @@ env:
   DEFAULT_UV_VERSION: "0.10.x"
   DEFAULT_PYTHON_VERSION: "3.12"
 jobs:
+  changes:
+    name: Detect Docs Changes
+    runs-on: ubuntu-slim
+    outputs:
+      docs_changed: ${{ steps.force.outputs.run_all == 'true' || steps.filter.outputs.docs == 'true' }}
+    steps:
+      - name: Checkout
+        uses: actions/checkout@v6.0.2
+        with:
+          fetch-depth: 0
+      - name: Decide run mode
+        id: force
+        run: |
+          if [[ "${{ github.event_name }}" == "workflow_dispatch" ]]; then
+            echo "run_all=true" >> "$GITHUB_OUTPUT"
+          elif [[ "${{ github.event_name }}" == "push" && ( "${{ github.ref_name }}" == "main" || "${{ github.ref_name }}" == "dev" ) ]]; then
+            echo "run_all=true" >> "$GITHUB_OUTPUT"
+          else
+            echo "run_all=false" >> "$GITHUB_OUTPUT"
+          fi
+      - name: Set diff range
+        id: range
+        if: steps.force.outputs.run_all != 'true'
+        run: |
+          if [[ "${{ github.event_name }}" == "pull_request" ]]; then
+            echo "base=${{ github.event.pull_request.base.sha }}" >> "$GITHUB_OUTPUT"
+          elif [[ "${{ github.event.created }}" == "true" ]]; then
+            echo "base=${{ github.event.repository.default_branch }}" >> "$GITHUB_OUTPUT"
+          else
+            echo "base=${{ github.event.before }}" >> "$GITHUB_OUTPUT"
+          fi
+          echo "ref=${{ github.sha }}" >> "$GITHUB_OUTPUT"
+      - name: Detect changes
+        id: filter
+        if: steps.force.outputs.run_all != 'true'
+        uses: dorny/paths-filter@v3.0.2
+        with:
+          base: ${{ steps.range.outputs.base }}
+          ref: ${{ steps.range.outputs.ref }}
+          filters: |
+            docs:
+              - 'docs/**'
+              - 'zensical.toml'
+              - 'pyproject.toml'
+              - 'uv.lock'
+              - '.github/workflows/ci-docs.yml'
   build:
+    needs: changes
+    if: needs.changes.outputs.docs_changed == 'true'
     name: Build Documentation
     runs-on: ubuntu-24.04
     steps:
@@ -64,8 +99,8 @@ jobs:
           name: github-pages-${{ github.run_id }}-${{ github.run_attempt }}
   deploy:
     name: Deploy Documentation
-    needs: build
-    if: github.event_name == 'push' && github.ref == 'refs/heads/main'
+    needs: [changes, build]
+    if: github.event_name == 'push' && github.ref == 'refs/heads/main' && needs.changes.outputs.docs_changed == 'true'
     runs-on: ubuntu-24.04
     environment:
       name: github-pages
@@ -76,3 +111,22 @@ jobs:
         id: deployment
         with:
          artifact_name: github-pages-${{ github.run_id }}-${{ github.run_attempt }}
+  gate:
+    name: Docs CI Gate
+    needs: [changes, build]
+    if: always()
+    runs-on: ubuntu-slim
+    steps:
+      - name: Check gate
+        run: |
+          if [[ "${{ needs.changes.outputs.docs_changed }}" != "true" ]]; then
+            echo "No docs-relevant changes detected."
+            exit 0
+          fi
+
+          if [[ "${{ needs.build.result }}" != "success" ]]; then
+            echo "::error::Docs build job result: ${{ needs.build.result }}"
+            exit 1
+          fi
+
+          echo "Docs checks passed."
```
.github/workflows/ci-frontend.yml (vendored, 102 changed lines)
```diff
@@ -3,21 +3,60 @@ on:
   push:
     branches-ignore:
       - 'translations**'
-    paths:
-      - 'src-ui/**'
-      - '.github/workflows/ci-frontend.yml'
   pull_request:
     branches-ignore:
       - 'translations**'
-    paths:
-      - 'src-ui/**'
-      - '.github/workflows/ci-frontend.yml'
   workflow_dispatch:
 concurrency:
   group: frontend-${{ github.event.pull_request.number || github.ref }}
   cancel-in-progress: true
 jobs:
+  changes:
+    name: Detect Frontend Changes
+    runs-on: ubuntu-slim
+    outputs:
+      frontend_changed: ${{ steps.force.outputs.run_all == 'true' || steps.filter.outputs.frontend == 'true' }}
+    steps:
+      - name: Checkout
+        uses: actions/checkout@v6.0.2
+        with:
+          fetch-depth: 0
+      - name: Decide run mode
+        id: force
+        run: |
+          if [[ "${{ github.event_name }}" == "workflow_dispatch" ]]; then
+            echo "run_all=true" >> "$GITHUB_OUTPUT"
+          elif [[ "${{ github.event_name }}" == "push" && ( "${{ github.ref_name }}" == "main" || "${{ github.ref_name }}" == "dev" ) ]]; then
+            echo "run_all=true" >> "$GITHUB_OUTPUT"
+          else
+            echo "run_all=false" >> "$GITHUB_OUTPUT"
+          fi
+      - name: Set diff range
+        id: range
+        if: steps.force.outputs.run_all != 'true'
+        run: |
+          if [[ "${{ github.event_name }}" == "pull_request" ]]; then
+            echo "base=${{ github.event.pull_request.base.sha }}" >> "$GITHUB_OUTPUT"
+          elif [[ "${{ github.event.created }}" == "true" ]]; then
+            echo "base=${{ github.event.repository.default_branch }}" >> "$GITHUB_OUTPUT"
+          else
+            echo "base=${{ github.event.before }}" >> "$GITHUB_OUTPUT"
+          fi
+          echo "ref=${{ github.sha }}" >> "$GITHUB_OUTPUT"
+      - name: Detect changes
+        id: filter
+        if: steps.force.outputs.run_all != 'true'
+        uses: dorny/paths-filter@v3.0.2
+        with:
+          base: ${{ steps.range.outputs.base }}
+          ref: ${{ steps.range.outputs.ref }}
+          filters: |
+            frontend:
+              - 'src-ui/**'
+              - '.github/workflows/ci-frontend.yml'
   install-dependencies:
+    needs: changes
+    if: needs.changes.outputs.frontend_changed == 'true'
     name: Install Dependencies
     runs-on: ubuntu-24.04
     steps:
@@ -45,7 +84,8 @@ jobs:
         run: cd src-ui && pnpm install
   lint:
     name: Lint
-    needs: install-dependencies
+    needs: [changes, install-dependencies]
+    if: needs.changes.outputs.frontend_changed == 'true'
     runs-on: ubuntu-24.04
     steps:
       - name: Checkout
@@ -73,7 +113,8 @@ jobs:
         run: cd src-ui && pnpm run lint
   unit-tests:
     name: "Unit Tests (${{ matrix.shard-index }}/${{ matrix.shard-count }})"
-    needs: install-dependencies
+    needs: [changes, install-dependencies]
+    if: needs.changes.outputs.frontend_changed == 'true'
     runs-on: ubuntu-24.04
     strategy:
       fail-fast: false
@@ -119,7 +160,8 @@ jobs:
           directory: src-ui/coverage/
   e2e-tests:
     name: "E2E Tests (${{ matrix.shard-index }}/${{ matrix.shard-count }})"
-    needs: install-dependencies
+    needs: [changes, install-dependencies]
+    if: needs.changes.outputs.frontend_changed == 'true'
     runs-on: ubuntu-24.04
     container: mcr.microsoft.com/playwright:v1.58.2-noble
     env:
@@ -159,7 +201,8 @@ jobs:
         run: cd src-ui && pnpm exec playwright test --shard ${{ matrix.shard-index }}/${{ matrix.shard-count }}
   bundle-analysis:
     name: Bundle Analysis
-    needs: [unit-tests, e2e-tests]
+    needs: [changes, unit-tests, e2e-tests]
+    if: needs.changes.outputs.frontend_changed == 'true'
     runs-on: ubuntu-24.04
     steps:
       - name: Checkout
@@ -189,3 +232,42 @@ jobs:
         env:
           CODECOV_TOKEN: ${{ secrets.CODECOV_TOKEN }}
         run: cd src-ui && pnpm run build --configuration=production
+  gate:
+    name: Frontend CI Gate
+    needs: [changes, install-dependencies, lint, unit-tests, e2e-tests, bundle-analysis]
+    if: always()
+    runs-on: ubuntu-slim
+    steps:
+      - name: Check gate
+        run: |
+          if [[ "${{ needs.changes.outputs.frontend_changed }}" != "true" ]]; then
+            echo "No frontend-relevant changes detected."
+            exit 0
+          fi
+
+          if [[ "${{ needs['install-dependencies'].result }}" != "success" ]]; then
+            echo "::error::Frontend install job result: ${{ needs['install-dependencies'].result }}"
+            exit 1
+          fi
+
+          if [[ "${{ needs.lint.result }}" != "success" ]]; then
+            echo "::error::Frontend lint job result: ${{ needs.lint.result }}"
+            exit 1
+          fi
+
+          if [[ "${{ needs['unit-tests'].result }}" != "success" ]]; then
+            echo "::error::Frontend unit-tests job result: ${{ needs['unit-tests'].result }}"
+            exit 1
+          fi
+
+          if [[ "${{ needs['e2e-tests'].result }}" != "success" ]]; then
+            echo "::error::Frontend e2e-tests job result: ${{ needs['e2e-tests'].result }}"
+            exit 1
+          fi
+
+          if [[ "${{ needs['bundle-analysis'].result }}" != "success" ]]; then
+            echo "::error::Frontend bundle-analysis job result: ${{ needs['bundle-analysis'].result }}"
+            exit 1
+          fi
+
+          echo "Frontend checks passed."
```
.github/workflows/pr-bot.yml (vendored, 17 changed lines)
```diff
@@ -2,13 +2,24 @@ name: PR Bot
 on:
   pull_request_target:
     types: [opened]
-permissions:
-  contents: read
-  pull-requests: write
 jobs:
+  anti-slop:
+    runs-on: ubuntu-latest
+    permissions:
+      contents: read
+      issues: read
+      pull-requests: write
+    steps:
+      - uses: peakoss/anti-slop@v0.2.1
+        with:
+          max-failures: 4
+          failure-add-pr-labels: 'ai'
   pr-bot:
     name: Automated PR Bot
     runs-on: ubuntu-latest
+    permissions:
+      contents: read
+      pull-requests: write
     steps:
       - name: Label PR by file path or branch name
         # see .github/labeler.yml for the labeler config
```
````diff
@@ -75,13 +75,13 @@ first-time setup.
 4. Install the Python dependencies:

    ```bash
-   $ uv sync --group dev
+   uv sync --group dev
    ```

 5. Install pre-commit hooks:

    ```bash
-   $ uv run prek install
+   uv run prek install
    ```

 6. Apply migrations and create a superuser (also can be done via the web UI) for your development instance:
@@ -89,8 +89,8 @@ first-time setup.
    ```bash
    # src/

-   $ uv run manage.py migrate
-   $ uv run manage.py createsuperuser
+   uv run manage.py migrate
+   uv run manage.py createsuperuser
    ```

 7. You can now either ...
@@ -103,7 +103,7 @@ first-time setup.

    - spin up a bare Redis container

-     ```
+     ```bash
      docker run -d -p 6379:6379 --restart unless-stopped redis:latest
      ```

@@ -118,18 +118,18 @@ work well for development, but you can use whatever you want.
 Configure the IDE to use the `src/`-folder as the base source folder.
 Configure the following launch configurations in your IDE:

-- `python3 manage.py runserver`
-- `python3 manage.py document_consumer`
-- `celery --app paperless worker -l DEBUG` (or any other log level)
+- `uv run manage.py runserver`
+- `uv run manage.py document_consumer`
+- `uv run celery --app paperless worker -l DEBUG` (or any other log level)

 To start them all:

 ```bash
 # src/

-$ python3 manage.py runserver & \
-python3 manage.py document_consumer & \
-celery --app paperless worker -l DEBUG
+uv run manage.py runserver & \
+uv run manage.py document_consumer & \
+uv run celery --app paperless worker -l DEBUG
 ```

 You might need the front end to test your back end code.
@@ -140,8 +140,8 @@ To build the front end once use this command:
 ```bash
 # src-ui/

-$ pnpm install
-$ ng build --configuration production
+pnpm install
+pnpm ng build --configuration production
 ```

 ### Testing
@@ -199,7 +199,7 @@ The front end is built using AngularJS. In order to get started, you need Node.js
 4. You can launch a development server by running:

    ```bash
-   ng serve
+   pnpm ng serve
    ```

    This will automatically update whenever you save. However, in-place
@@ -217,21 +217,21 @@ commit. See [above](#code-formatting-with-pre-commit-hooks) for installation instructions
 command such as

 ```bash
-$ git ls-files -- '*.ts' | xargs prek run prettier --files
+git ls-files -- '*.ts' | xargs uv run prek run prettier --files
 ```

 Front end testing uses Jest and Playwright. Unit tests and e2e tests,
 respectively, can be run non-interactively with:

 ```bash
-$ ng test
-$ npx playwright test
+pnpm ng test
+pnpm playwright test
 ```

 Playwright also includes a UI which can be run with:

 ```bash
-$ npx playwright test --ui
+pnpm playwright test --ui
 ```

 ### Building the frontend
@@ -239,7 +239,7 @@ $ npx playwright test --ui
 In order to build the front end and serve it as part of Django, execute:

 ```bash
-$ ng build --configuration production
+pnpm ng build --configuration production
 ```

 This will build the front end and put it in a location from which the
@@ -312,10 +312,10 @@ end (such as error messages).
 - The source language of the project is "en_US".
 - Localization files end up in the folder `src/locale/`.
 - In order to extract strings from the application, call
-  `python3 manage.py makemessages -l en_US`. This is important after
+  `uv run manage.py makemessages -l en_US`. This is important after
   making changes to translatable strings.
 - The message files need to be compiled for them to show up in the
-  application. Call `python3 manage.py compilemessages` to do this.
+  application. Call `uv run manage.py compilemessages` to do this.
   The generated files don't get committed into git, since these are
   derived artifacts. The build pipeline takes care of executing this
   command.
````
|
||||
"flower~=2.0.1",
|
||||
"gotenberg-client~=0.13.1",
|
||||
"httpx-oauth~=0.16",
|
||||
"ijson>=3.2",
|
||||
"imap-tools~=1.11.0",
|
||||
"jinja2~=3.1.5",
|
||||
"langdetect~=1.0.9",
|
||||
|
||||
```diff
@@ -1217,7 +1217,7 @@
       </context-group>
       <context-group purpose="location">
         <context context-type="sourcefile">src/app/components/document-detail/document-detail.component.ts</context>
-        <context context-type="linenumber">1756</context>
+        <context context-type="linenumber">1760</context>
       </context-group>
     </trans-unit>
     <trans-unit id="1577733187050997705" datatype="html">
@@ -2090,7 +2090,7 @@
       </context-group>
       <context-group purpose="location">
         <context context-type="sourcefile">src/app/components/document-detail/document-detail.component.ts</context>
-        <context context-type="linenumber">634</context>
+        <context context-type="linenumber">637</context>
       </context-group>
       <context-group purpose="location">
         <context context-type="sourcefile">src/app/components/document-detail/document-version-dropdown/document-version-dropdown.component.html</context>
@@ -2798,11 +2798,11 @@
       </context-group>
       <context-group purpose="location">
         <context context-type="sourcefile">src/app/components/document-detail/document-detail.component.ts</context>
-        <context context-type="linenumber">1376</context>
+        <context context-type="linenumber">1379</context>
       </context-group>
       <context-group purpose="location">
         <context context-type="sourcefile">src/app/components/document-detail/document-detail.component.ts</context>
-        <context context-type="linenumber">1757</context>
+        <context context-type="linenumber">1761</context>
       </context-group>
       <context-group purpose="location">
         <context context-type="sourcefile">src/app/components/document-list/bulk-editor/bulk-editor.component.ts</context>
@@ -3400,7 +3400,7 @@
       </context-group>
       <context-group purpose="location">
         <context context-type="sourcefile">src/app/components/document-detail/document-detail.component.ts</context>
-        <context context-type="linenumber">1329</context>
+        <context context-type="linenumber">1332</context>
       </context-group>
       <context-group purpose="location">
         <context context-type="sourcefile">src/app/components/document-list/bulk-editor/bulk-editor.component.ts</context>
@@ -3434,39 +3434,46 @@
         <context context-type="linenumber">9</context>
       </context-group>
     </trans-unit>
+    <trans-unit id="6705735915615634619" datatype="html">
+      <source>{VAR_PLURAL, plural, =1 {One page} other {<x id="INTERPOLATION"/> pages}}</source>
+      <context-group purpose="location">
+        <context context-type="sourcefile">src/app/components/common/confirm-dialog/merge-confirm-dialog/merge-confirm-dialog.component.html</context>
+        <context context-type="linenumber">25</context>
+      </context-group>
+    </trans-unit>
     <trans-unit id="7508164375697837821" datatype="html">
       <source>Use metadata from:</source>
       <context-group purpose="location">
         <context context-type="sourcefile">src/app/components/common/confirm-dialog/merge-confirm-dialog/merge-confirm-dialog.component.html</context>
-        <context context-type="linenumber">22</context>
+        <context context-type="linenumber">34</context>
       </context-group>
     </trans-unit>
     <trans-unit id="2020403212524346652" datatype="html">
       <source>Regenerate all metadata</source>
       <context-group purpose="location">
         <context context-type="sourcefile">src/app/components/common/confirm-dialog/merge-confirm-dialog/merge-confirm-dialog.component.html</context>
-        <context context-type="linenumber">24</context>
+        <context context-type="linenumber">36</context>
       </context-group>
     </trans-unit>
     <trans-unit id="2710430925353472741" datatype="html">
       <source>Try to include archive version in merge for non-PDF files</source>
       <context-group purpose="location">
         <context context-type="sourcefile">src/app/components/common/confirm-dialog/merge-confirm-dialog/merge-confirm-dialog.component.html</context>
-        <context context-type="linenumber">32</context>
+        <context context-type="linenumber">44</context>
       </context-group>
     </trans-unit>
     <trans-unit id="5612366187076076264" datatype="html">
       <source>Delete original documents after successful merge</source>
       <context-group purpose="location">
         <context context-type="sourcefile">src/app/components/common/confirm-dialog/merge-confirm-dialog/merge-confirm-dialog.component.html</context>
-        <context context-type="linenumber">36</context>
+        <context context-type="linenumber">48</context>
       </context-group>
     </trans-unit>
     <trans-unit id="5138283234724909648" datatype="html">
       <source>Note that only PDFs will be included.</source>
       <context-group purpose="location">
         <context context-type="sourcefile">src/app/components/common/confirm-dialog/merge-confirm-dialog/merge-confirm-dialog.component.html</context>
-        <context context-type="linenumber">39</context>
+        <context context-type="linenumber">51</context>
       </context-group>
     </trans-unit>
     <trans-unit id="1309641780471803652" datatype="html">
@@ -3505,7 +3512,7 @@
       </context-group>
       <context-group purpose="location">
         <context context-type="sourcefile">src/app/components/document-detail/document-detail.component.ts</context>
-        <context context-type="linenumber">1808</context>
+        <context context-type="linenumber">1814</context>
       </context-group>
     </trans-unit>
     <trans-unit id="6661109599266152398" datatype="html">
@@ -3516,7 +3523,7 @@
       </context-group>
       <context-group purpose="location">
         <context context-type="sourcefile">src/app/components/document-detail/document-detail.component.ts</context>
-        <context context-type="linenumber">1809</context>
+        <context context-type="linenumber">1815</context>
       </context-group>
     </trans-unit>
     <trans-unit id="5162686434580248853" datatype="html">
@@ -3527,7 +3534,7 @@
       </context-group>
       <context-group purpose="location">
         <context context-type="sourcefile">src/app/components/document-detail/document-detail.component.ts</context>
-        <context context-type="linenumber">1810</context>
+        <context context-type="linenumber">1816</context>
       </context-group>
     </trans-unit>
     <trans-unit id="8157388568390631653" datatype="html">
@@ -5488,7 +5495,7 @@
       </context-group>
       <context-group purpose="location">
         <context context-type="sourcefile">src/app/components/document-detail/document-detail.component.ts</context>
-        <context context-type="linenumber">1333</context>
+        <context context-type="linenumber">1336</context>
       </context-group>
       <context-group purpose="location">
         <context context-type="sourcefile">src/app/components/document-list/bulk-editor/bulk-editor.component.ts</context>
@@ -7695,81 +7702,81 @@
       <source>Error retrieving metadata</source>
       <context-group purpose="location">
         <context context-type="sourcefile">src/app/components/document-detail/document-detail.component.ts</context>
-        <context context-type="linenumber">408</context>
+        <context context-type="linenumber">411</context>
       </context-group>
     </trans-unit>
     <trans-unit id="2218903673684131427" datatype="html">
       <source>An error occurred loading content: <x id="PH" equiv-text="err.message ?? err.toString()"/></source>
       <context-group purpose="location">
         <context context-type="sourcefile">src/app/components/document-detail/document-detail.component.ts</context>
-        <context context-type="linenumber">509,511</context>
+        <context context-type="linenumber">512,514</context>
       </context-group>
       <context-group purpose="location">
         <context context-type="sourcefile">src/app/components/document-detail/document-detail.component.ts</context>
-        <context context-type="linenumber">956,958</context>
+        <context context-type="linenumber">959,961</context>
       </context-group>
     </trans-unit>
     <trans-unit id="6357361810318120957" datatype="html">
       <source>Document was updated</source>
       <context-group purpose="location">
         <context context-type="sourcefile">src/app/components/document-detail/document-detail.component.ts</context>
-        <context context-type="linenumber">629</context>
+        <context context-type="linenumber">632</context>
       </context-group>
     </trans-unit>
     <trans-unit id="5154064822428631306" datatype="html">
       <source>Document was updated at <x id="PH" equiv-text="formattedModified"/>.</source>
       <context-group purpose="location">
         <context context-type="sourcefile">src/app/components/document-detail/document-detail.component.ts</context>
-        <context context-type="linenumber">630</context>
+        <context context-type="linenumber">633</context>
       </context-group>
     </trans-unit>
     <trans-unit id="8462497568316256794" datatype="html">
       <source>Reload to discard your local unsaved edits and load the latest remote version.</source>
       <context-group purpose="location">
         <context context-type="sourcefile">src/app/components/document-detail/document-detail.component.ts</context>
-        <context context-type="linenumber">631</context>
+        <context context-type="linenumber">634</context>
       </context-group>
     </trans-unit>
     <trans-unit id="7967484035994732534" datatype="html">
       <source>Reload</source>
       <context-group purpose="location">
         <context context-type="sourcefile">src/app/components/document-detail/document-detail.component.ts</context>
-        <context context-type="linenumber">633</context>
+        <context context-type="linenumber">636</context>
       </context-group>
     </trans-unit>
     <trans-unit id="2907037627372942104" datatype="html">
       <source>Document reloaded with latest changes.</source>
       <context-group purpose="location">
         <context context-type="sourcefile">src/app/components/document-detail/document-detail.component.ts</context>
-        <context context-type="linenumber">689</context>
+        <context context-type="linenumber">692</context>
       </context-group>
     </trans-unit>
     <trans-unit id="6435639868943916539" datatype="html">
       <source>Document reloaded.</source>
       <context-group purpose="location">
         <context context-type="sourcefile">src/app/components/document-detail/document-detail.component.ts</context>
-        <context context-type="linenumber">700</context>
+        <context context-type="linenumber">703</context>
      </context-group>
     </trans-unit>
     <trans-unit id="6142395741265832184" datatype="html">
       <source>Next document</source>
       <context-group purpose="location">
         <context context-type="sourcefile">src/app/components/document-detail/document-detail.component.ts</context>
-        <context context-type="linenumber">802</context>
+        <context context-type="linenumber">805</context>
       </context-group>
     </trans-unit>
     <trans-unit id="651985345816518480" datatype="html">
       <source>Previous document</source>
       <context-group purpose="location">
         <context context-type="sourcefile">src/app/components/document-detail/document-detail.component.ts</context>
-        <context context-type="linenumber">812</context>
+        <context context-type="linenumber">815</context>
       </context-group>
     </trans-unit>
     <trans-unit id="2885986061416655600" datatype="html">
       <source>Close document</source>
       <context-group purpose="location">
         <context context-type="sourcefile">src/app/components/document-detail/document-detail.component.ts</context>
-        <context context-type="linenumber">820</context>
+        <context context-type="linenumber">823</context>
       </context-group>
       <context-group purpose="location">
         <context context-type="sourcefile">src/app/services/open-documents.service.ts</context>
@@ -7780,67 +7787,67 @@
       <source>Save document</source>
       <context-group purpose="location">
         <context context-type="sourcefile">src/app/components/document-detail/document-detail.component.ts</context>
-        <context context-type="linenumber">827</context>
+        <context context-type="linenumber">830</context>
       </context-group>
     </trans-unit>
     <trans-unit id="1784543155727940353" datatype="html">
       <source>Save and close / next</source>
       <context-group purpose="location">
         <context context-type="sourcefile">src/app/components/document-detail/document-detail.component.ts</context>
-        <context context-type="linenumber">836</context>
+        <context context-type="linenumber">839</context>
       </context-group>
     </trans-unit>
     <trans-unit id="7427704425579737895" datatype="html">
       <source>Error retrieving version content</source>
       <context-group purpose="location">
         <context context-type="sourcefile">src/app/components/document-detail/document-detail.component.ts</context>
-        <context context-type="linenumber">940</context>
+        <context context-type="linenumber">943</context>
       </context-group>
     </trans-unit>
     <trans-unit id="3456881259945295697" datatype="html">
       <source>Error retrieving suggestions.</source>
       <context-group purpose="location">
         <context context-type="sourcefile">src/app/components/document-detail/document-detail.component.ts</context>
-        <context context-type="linenumber">997</context>
+        <context context-type="linenumber">1000</context>
       </context-group>
     </trans-unit>
     <trans-unit id="2194092841814123758" datatype="html">
       <source>Document "<x id="PH" equiv-text="newValues.title"/>" saved successfully.</source>
       <context-group purpose="location">
         <context context-type="sourcefile">src/app/components/document-detail/document-detail.component.ts</context>
-        <context context-type="linenumber">1209</context>
+        <context context-type="linenumber">1212</context>
       </context-group>
       <context-group purpose="location">
         <context context-type="sourcefile">src/app/components/document-detail/document-detail.component.ts</context>
-        <context context-type="linenumber">1236</context>
+        <context context-type="linenumber">1239</context>
       </context-group>
     </trans-unit>
     <trans-unit id="6626387786259219838" datatype="html">
       <source>Error saving document "<x id="PH" equiv-text="this.document.title"/>"</source>
       <context-group purpose="location">
         <context context-type="sourcefile">src/app/components/document-detail/document-detail.component.ts</context>
-        <context context-type="linenumber">1242</context>
+        <context context-type="linenumber">1245</context>
       </context-group>
     </trans-unit>
     <trans-unit id="448882439049417053" datatype="html">
       <source>Error saving document</source>
       <context-group purpose="location">
         <context context-type="sourcefile">src/app/components/document-detail/document-detail.component.ts</context>
-        <context context-type="linenumber">1297</context>
+        <context context-type="linenumber">1300</context>
       </context-group>
     </trans-unit>
     <trans-unit id="8410796510716511826" datatype="html">
       <source>Do you really want to move the document "<x id="PH" equiv-text="this.document.title"/>" to the trash?</source>
       <context-group purpose="location">
         <context context-type="sourcefile">src/app/components/document-detail/document-detail.component.ts</context>
-        <context context-type="linenumber">1330</context>
+        <context context-type="linenumber">1333</context>
       </context-group>
     </trans-unit>
     <trans-unit id="282586936710748252" datatype="html">
       <source>Documents can be restored prior to permanent deletion.</source>
       <context-group purpose="location">
         <context context-type="sourcefile">src/app/components/document-detail/document-detail.component.ts</context>
-        <context context-type="linenumber">1331</context>
+        <context context-type="linenumber">1334</context>
       </context-group>
       <context-group purpose="location">
         <context context-type="sourcefile">src/app/components/document-list/bulk-editor/bulk-editor.component.ts</context>
@@ -7851,14 +7858,14 @@
       <source>Error deleting document</source>
       <context-group purpose="location">
         <context context-type="sourcefile">src/app/components/document-detail/document-detail.component.ts</context>
-        <context context-type="linenumber">1352</context>
+        <context context-type="linenumber">1355</context>
       </context-group>
     </trans-unit>
     <trans-unit id="619486176823357521" datatype="html">
       <source>Reprocess confirm</source>
       <context-group purpose="location">
         <context context-type="sourcefile">src/app/components/document-detail/document-detail.component.ts</context>
-        <context context-type="linenumber">1372</context>
+        <context context-type="linenumber">1375</context>
       </context-group>
       <context-group purpose="location">
         <context context-type="sourcefile">src/app/components/document-list/bulk-editor/bulk-editor.component.ts</context>
@@ -7869,102 +7876,102 @@
       <source>This operation will permanently recreate the archive file for this document.</source>
       <context-group purpose="location">
         <context context-type="sourcefile">src/app/components/document-detail/document-detail.component.ts</context>
-        <context context-type="linenumber">1373</context>
+        <context context-type="linenumber">1376</context>
       </context-group>
     </trans-unit>
     <trans-unit id="302054111564709516" datatype="html">
       <source>The archive file will be re-generated with the current settings.</source>
       <context-group purpose="location">
         <context context-type="sourcefile">src/app/components/document-detail/document-detail.component.ts</context>
-        <context context-type="linenumber">1374</context>
+        <context context-type="linenumber">1377</context>
       </context-group>
     </trans-unit>
     <trans-unit id="4700389117298802932" datatype="html">
       <source>Reprocess operation for "<x id="PH" equiv-text="this.document.title"/>" will begin in the background.</source>
       <context-group purpose="location">
         <context context-type="sourcefile">src/app/components/document-detail/document-detail.component.ts</context>
-        <context context-type="linenumber">1384</context>
+        <context context-type="linenumber">1387</context>
       </context-group>
     </trans-unit>
     <trans-unit id="4409560272830824468" datatype="html">
       <source>Error executing operation</source>
       <context-group purpose="location">
         <context context-type="sourcefile">src/app/components/document-detail/document-detail.component.ts</context>
-        <context context-type="linenumber">1395</context>
+        <context context-type="linenumber">1398</context>
       </context-group>
     </trans-unit>
     <trans-unit id="6030453331794586802" datatype="html">
       <source>Error downloading document</source>
       <context-group purpose="location">
         <context context-type="sourcefile">src/app/components/document-detail/document-detail.component.ts</context>
-        <context context-type="linenumber">1458</context>
+        <context context-type="linenumber">1461</context>
       </context-group>
     </trans-unit>
     <trans-unit id="4458954481601077369" datatype="html">
       <source>Page Fit</source>
       <context-group purpose="location">
         <context context-type="sourcefile">src/app/components/document-detail/document-detail.component.ts</context>
-        <context context-type="linenumber">1538</context>
+        <context context-type="linenumber">1541</context>
       </context-group>
     </trans-unit>
     <trans-unit id="4663705961777238777" datatype="html">
       <source>PDF edit operation for "<x id="PH" equiv-text="this.document.title"/>" will begin in the background.</source>
       <context-group purpose="location">
         <context context-type="sourcefile">src/app/components/document-detail/document-detail.component.ts</context>
-        <context context-type="linenumber">1775</context>
+        <context context-type="linenumber">1781</context>
       </context-group>
     </trans-unit>
     <trans-unit id="9043972994040261999" datatype="html">
       <source>Error executing PDF edit operation</source>
       <context-group purpose="location">
         <context context-type="sourcefile">src/app/components/document-detail/document-detail.component.ts</context>
-        <context context-type="linenumber">1787</context>
+        <context context-type="linenumber">1793</context>
       </context-group>
     </trans-unit>
     <trans-unit id="6172690334763056188" datatype="html">
       <source>Please enter the current password before attempting to remove it.</source>
       <context-group purpose="location">
         <context context-type="sourcefile">src/app/components/document-detail/document-detail.component.ts</context>
-        <context context-type="linenumber">1798</context>
+        <context context-type="linenumber">1804</context>
       </context-group>
     </trans-unit>
     <trans-unit id="968660764814228922" datatype="html">
       <source>Password removal operation for "<x id="PH" equiv-text="this.document.title"/>" will begin in the background.</source>
       <context-group purpose="location">
         <context context-type="sourcefile">src/app/components/document-detail/document-detail.component.ts</context>
-        <context context-type="linenumber">1830</context>
+        <context context-type="linenumber">1838</context>
       </context-group>
     </trans-unit>
     <trans-unit id="2282118435712883014" datatype="html">
       <source>Error executing password removal operation</source>
       <context-group purpose="location">
         <context context-type="sourcefile">src/app/components/document-detail/document-detail.component.ts</context>
-        <context context-type="linenumber">1844</context>
+        <context context-type="linenumber">1852</context>
       </context-group>
     </trans-unit>
     <trans-unit id="3740891324955700797" datatype="html">
       <source>Print failed.</source>
       <context-group purpose="location">
         <context context-type="sourcefile">src/app/components/document-detail/document-detail.component.ts</context>
-        <context context-type="linenumber">1883</context>
+        <context context-type="linenumber">1891</context>
       </context-group>
     </trans-unit>
     <trans-unit id="6457245677384603573" datatype="html">
       <source>Error loading document for printing.</source>
       <context-group purpose="location">
         <context context-type="sourcefile">src/app/components/document-detail/document-detail.component.ts</context>
-        <context context-type="linenumber">1895</context>
+        <context context-type="linenumber">1903</context>
       </context-group>
     </trans-unit>
     <trans-unit id="6085793215710522488" datatype="html">
       <source>An error occurred loading tiff: <x id="PH" equiv-text="err.toString()"/></source>
       <context-group purpose="location">
         <context context-type="sourcefile">src/app/components/document-detail/document-detail.component.ts</context>
-        <context context-type="linenumber">1960</context>
+        <context context-type="linenumber">1968</context>
       </context-group>
       <context-group purpose="location">
         <context context-type="sourcefile">src/app/components/document-detail/document-detail.component.ts</context>
-        <context context-type="linenumber">1964</context>
+        <context context-type="linenumber">1972</context>
       </context-group>
     </trans-unit>
     <trans-unit id="4958946940233632319" datatype="html">
```
```diff
@@ -10,10 +10,22 @@
 <ul class="list-group"
   cdkDropList
   (cdkDropListDropped)="onDrop($event)">
-  @for (documentID of documentIDs; track documentID) {
-    <li class="list-group-item" cdkDrag>
+  @for (document of documents; track document.id) {
+    <li class="list-group-item d-flex align-items-center" cdkDrag>
       <i-bs name="grip-vertical" class="me-2"></i-bs>
-      {{getDocument(documentID)?.title}}
+      <div class="d-flex flex-column">
+        <div>
+          @if (document.correspondent) {
+            <b>{{document.correspondent | correspondentName | async}}: </b>
+          }{{document.title}}
+        </div>
+        <small class="text-muted">
+          {{document.created | customDate:'mediumDate'}}
+          @if (document.page_count) {
+            | {document.page_count, plural, =1 {One page} other {{{document.page_count}} pages}}
+          }
+        </small>
+      </div>
     </li>
   }
 </ul>
```
```diff
@@ -3,11 +3,14 @@ import {
   DragDropModule,
   moveItemInArray,
 } from '@angular/cdk/drag-drop'
+import { AsyncPipe } from '@angular/common'
 import { Component, OnInit, inject } from '@angular/core'
 import { FormsModule, ReactiveFormsModule } from '@angular/forms'
 import { NgxBootstrapIconsModule } from 'ngx-bootstrap-icons'
 import { takeUntil } from 'rxjs'
 import { Document } from 'src/app/data/document'
+import { CorrespondentNamePipe } from 'src/app/pipes/correspondent-name.pipe'
+import { CustomDatePipe } from 'src/app/pipes/custom-date.pipe'
 import { PermissionsService } from 'src/app/services/permissions.service'
 import { DocumentService } from 'src/app/services/rest/document.service'
 import { ConfirmDialogComponent } from '../confirm-dialog.component'
@@ -17,6 +20,9 @@ import { ConfirmDialogComponent } from '../confirm-dialog.component'
   templateUrl: './merge-confirm-dialog.component.html',
   styleUrl: './merge-confirm-dialog.component.scss',
   imports: [
+    AsyncPipe,
+    CorrespondentNamePipe,
+    CustomDatePipe,
     DragDropModule,
     FormsModule,
     ReactiveFormsModule,
```
```diff
@@ -3,6 +3,7 @@ import { provideHttpClientTesting } from '@angular/common/http/testing'
 import { ComponentFixture, TestBed } from '@angular/core/testing'
 import { NgbActiveModal } from '@ng-bootstrap/ng-bootstrap'
 import { NgxBootstrapIconsModule, allIcons } from 'ngx-bootstrap-icons'
+import { DocumentService } from 'src/app/services/rest/document.service'
 import { PDFEditorComponent } from './pdf-editor.component'

 describe('PDFEditorComponent', () => {
@@ -139,4 +140,16 @@ describe('PDFEditorComponent', () => {
     expect(component.pages[1].page).toBe(2)
     expect(component.pages[2].page).toBe(3)
   })
+
+  it('should include selected version in preview source when provided', () => {
+    const documentService = TestBed.inject(DocumentService)
+    const previewSpy = jest
+      .spyOn(documentService, 'getPreviewUrl')
+      .mockReturnValue('preview-version')
+    component.documentID = 3
+    component.versionID = 10
+
+    expect(component.pdfSrc).toBe('preview-version')
+    expect(previewSpy).toHaveBeenCalledWith(3, false, 10)
+  })
 })
```
```diff
@@ -46,6 +46,7 @@ export class PDFEditorComponent extends ConfirmDialogComponent {
   activeModal: NgbActiveModal = inject(NgbActiveModal)

   documentID: number
+  versionID?: number
   pages: PageOperation[] = []
   totalPages = 0
   editMode: PdfEditorEditMode = this.settingsService.get(
@@ -55,7 +56,11 @@ export class PDFEditorComponent extends ConfirmDialogComponent {
   includeMetadata: boolean = true

   get pdfSrc(): string {
-    return this.documentService.getPreviewUrl(this.documentID)
+    return this.documentService.getPreviewUrl(
+      this.documentID,
+      false,
+      this.versionID
+    )
   }

   pdfLoaded(pdf: PngxPdfDocumentProxy) {
```
```diff
@@ -1661,22 +1661,25 @@ describe('DocumentDetailComponent', () => {
     const closeSpy = jest.spyOn(openDocumentsService, 'closeDocument')
     const errorSpy = jest.spyOn(toastService, 'showError')
     initNormally()
+    component.selectedVersionId = 10
     component.editPdf()
     expect(modal).not.toBeUndefined()
     modal.componentInstance.documentID = doc.id
+    expect(modal.componentInstance.versionID).toBe(10)
     modal.componentInstance.pages = [{ page: 1, rotate: 0, splitAfter: false }]
     modal.componentInstance.confirm()
     let req = httpTestingController.expectOne(
       `${environment.apiBaseUrl}documents/bulk_edit/`
     )
     expect(req.request.body).toEqual({
-      documents: [doc.id],
+      documents: [10],
       method: 'edit_pdf',
       parameters: {
         operations: [{ page: 1, rotate: 0, doc: 0 }],
         delete_original: false,
         update_document: false,
         include_metadata: true,
+        source_mode: 'explicit_selection',
       },
     })
     req.error(new ErrorEvent('failed'))
@@ -1698,6 +1701,7 @@ describe('DocumentDetailComponent', () => {
     let modal: NgbModalRef
     modalService.activeInstances.subscribe((m) => (modal = m[0]))
     initNormally()
+    component.selectedVersionId = 10
     component.password = 'secret'
     component.removePassword()
     const dialog =
@@ -1710,13 +1714,14 @@ describe('DocumentDetailComponent', () => {
       `${environment.apiBaseUrl}documents/bulk_edit/`
     )
     expect(req.request.body).toEqual({
-      documents: [doc.id],
+      documents: [10],
       method: 'remove_password',
       parameters: {
         password: 'secret',
         update_document: false,
         include_metadata: false,
         delete_original: true,
+        source_mode: 'explicit_selection',
       },
     })
     req.flush(true)
```
```diff
@@ -74,7 +74,10 @@ import {
 import { CorrespondentService } from 'src/app/services/rest/correspondent.service'
 import { CustomFieldsService } from 'src/app/services/rest/custom-fields.service'
 import { DocumentTypeService } from 'src/app/services/rest/document-type.service'
-import { DocumentService } from 'src/app/services/rest/document.service'
+import {
+  BulkEditSourceMode,
+  DocumentService,
+} from 'src/app/services/rest/document.service'
 import { SavedViewService } from 'src/app/services/rest/saved-view.service'
 import { StoragePathService } from 'src/app/services/rest/storage-path.service'
 import { TagService } from 'src/app/services/rest/tag.service'
@@ -1753,20 +1756,23 @@ export class DocumentDetailComponent
       size: 'xl',
       scrollable: true,
     })
+    const sourceDocumentId = this.selectedVersionId ?? this.document.id
     modal.componentInstance.title = $localize`PDF Editor`
     modal.componentInstance.btnCaption = $localize`Proceed`
     modal.componentInstance.documentID = this.document.id
+    modal.componentInstance.versionID = sourceDocumentId
     modal.componentInstance.confirmClicked
       .pipe(takeUntil(this.unsubscribeNotifier))
       .subscribe(() => {
         modal.componentInstance.buttonsEnabled = false
         this.documentsService
-          .bulkEdit([this.document.id], 'edit_pdf', {
+          .bulkEdit([sourceDocumentId], 'edit_pdf', {
             operations: modal.componentInstance.getOperations(),
             delete_original: modal.componentInstance.deleteOriginal,
             update_document:
               modal.componentInstance.editMode == PdfEditorEditMode.Update,
             include_metadata: modal.componentInstance.includeMetadata,
+            source_mode: BulkEditSourceMode.EXPLICIT_SELECTION,
           })
           .pipe(first(), takeUntil(this.unsubscribeNotifier))
           .subscribe({
@@ -1812,16 +1818,18 @@ export class DocumentDetailComponent
     modal.componentInstance.confirmClicked
       .pipe(takeUntil(this.unsubscribeNotifier))
       .subscribe(() => {
+        const sourceDocumentId = this.selectedVersionId ?? this.document.id
         const dialog =
           modal.componentInstance as PasswordRemovalConfirmDialogComponent
         dialog.buttonsEnabled = false
         this.networkActive = true
         this.documentsService
-          .bulkEdit([this.document.id], 'remove_password', {
+          .bulkEdit([sourceDocumentId], 'remove_password', {
             password: this.password,
             update_document: dialog.updateDocument,
             include_metadata: dialog.includeMetadata,
             delete_original: dialog.deleteOriginal,
+            source_mode: BulkEditSourceMode.EXPLICIT_SELECTION,
           })
           .pipe(first(), takeUntil(this.unsubscribeNotifier))
           .subscribe({
```
```diff
@@ -37,6 +37,11 @@ export interface SelectionData {
   selected_custom_fields: SelectionDataItem[]
 }

+export enum BulkEditSourceMode {
+  LATEST_VERSION = 'latest_version',
+  EXPLICIT_SELECTION = 'explicit_selection',
+}
+
 @Injectable({
   providedIn: 'root',
 })
```
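The enum's string values travel verbatim in the bulk-edit request body. For reference, the payload a version-targeted PDF edit now produces, as asserted by the updated component spec earlier in this comparison (shown here as a Python literal):

```python
# JSON body POSTed to documents/bulk_edit/ for a version-targeted edit,
# matching the updated spec's expectations.
payload = {
    "documents": [10],  # the selected version id, not the root document id
    "method": "edit_pdf",
    "parameters": {
        "operations": [{"page": 1, "rotate": 0, "doc": 0}],
        "delete_original": False,
        "update_document": False,
        "include_metadata": True,
        "source_mode": "explicit_selection",  # BulkEditSourceMode.EXPLICIT_SELECTION
    },
}
```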
@@ -29,12 +29,21 @@ from documents.plugins.helpers import DocumentsStatusManager
from documents.tasks import bulk_update_documents
from documents.tasks import consume_file
from documents.tasks import update_document_content_maybe_archive_file
from documents.versioning import get_latest_version_for_root
from documents.versioning import get_root_document

if TYPE_CHECKING:
from django.contrib.auth.models import User

logger: logging.Logger = logging.getLogger("paperless.bulk_edit")

SourceMode = Literal["latest_version", "explicit_selection"]


class SourceModeChoices:
LATEST_VERSION: SourceMode = "latest_version"
EXPLICIT_SELECTION: SourceMode = "explicit_selection"


@shared_task(bind=True)
def restore_archive_serial_numbers_task(
@@ -72,46 +81,21 @@ def restore_archive_serial_numbers(backup: dict[int, int | None]) -> None:
logger.info(f"Restored archive serial numbers for documents {list(backup.keys())}")


def _get_root_ids_by_doc_id(doc_ids: list[int]) -> dict[int, int]:
"""
Resolve each provided document id to its root document id.
def _resolve_root_and_source_doc(
doc: Document,
*,
source_mode: SourceMode = SourceModeChoices.LATEST_VERSION,
) -> tuple[Document, Document]:
root_doc = get_root_document(doc)

- If the id is already a root document: root id is itself.
- If the id is a version document: root id is its `root_document_id`.
"""
qs = Document.objects.filter(id__in=doc_ids).only("id", "root_document_id")
return {doc.id: doc.root_document_id or doc.id for doc in qs}
if source_mode == SourceModeChoices.EXPLICIT_SELECTION:
return root_doc, doc

# Version IDs are explicit by default, only a selected root resolves to latest
if doc.root_document_id is not None:
return root_doc, doc

def _get_root_and_current_docs_by_root_id(
root_ids: set[int],
) -> tuple[dict[int, Document], dict[int, Document]]:
"""
Returns:
- root_docs: root_id -> root Document
- current_docs: root_id -> newest version Document (or root if none)
"""
root_docs = {
doc.id: doc
for doc in Document.objects.filter(id__in=root_ids).select_related(
"owner",
)
}
latest_versions_by_root_id: dict[int, Document] = {}
for version_doc in Document.objects.filter(root_document_id__in=root_ids).order_by(
"root_document_id",
"-id",
):
root_id = version_doc.root_document_id
if root_id is None:
continue
latest_versions_by_root_id.setdefault(root_id, version_doc)

current_docs: dict[int, Document] = {
root_id: latest_versions_by_root_id.get(root_id, root_docs[root_id])
for root_id in root_docs
}
return root_docs, current_docs
return root_doc, get_latest_version_for_root(root_doc)


def set_correspondent(
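Stripped of the ORM details, the resolution rule the new _resolve_root_and_source_doc helper implements fits in a few lines. The sketch below is an editor's illustration over plain objects, not code from this diff; every name besides the two source-mode strings is an assumption:

# Minimal sketch of the version-resolution rule, assuming plain objects.
from dataclasses import dataclass
from typing import Optional

@dataclass
class Doc:
    id: int
    root_document: Optional["Doc"] = None
    latest_version: Optional["Doc"] = None

def resolve(doc: Doc, source_mode: str = "latest_version") -> tuple[Doc, Doc]:
    root = doc.root_document or doc
    if source_mode == "explicit_selection":
        return root, doc          # operate on exactly what the caller chose
    if doc.root_document is not None:
        return root, doc          # a version id is already explicit
    return root, (doc.latest_version or doc)  # a root resolves to its newest version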
@@ -421,21 +405,32 @@ def rotate(
doc_ids: list[int],
degrees: int,
*,
source_mode: SourceMode = SourceModeChoices.LATEST_VERSION,
user: User | None = None,
) -> Literal["OK"]:
logger.info(
f"Attempting to rotate {len(doc_ids)} documents by {degrees} degrees.",
)
doc_to_root_id = _get_root_ids_by_doc_id(doc_ids)
root_ids = set(doc_to_root_id.values())
root_docs_by_id, current_docs_by_root_id = _get_root_and_current_docs_by_root_id(
root_ids,
)
docs_by_id = {
doc.id: doc
for doc in Document.objects.select_related("root_document").filter(
id__in=doc_ids,
)
}
docs_by_root_id: dict[int, tuple[Document, Document]] = {}
for doc_id in doc_ids:
doc = docs_by_id.get(doc_id)
if doc is None:
continue
root_doc, source_doc = _resolve_root_and_source_doc(
doc,
source_mode=source_mode,
)
docs_by_root_id.setdefault(root_doc.id, (root_doc, source_doc))

import pikepdf

for root_id in root_ids:
root_doc = root_docs_by_id[root_id]
source_doc = current_docs_by_root_id[root_id]
for root_doc, source_doc in docs_by_root_id.values():
if source_doc.mime_type != "application/pdf":
logger.warning(
f"Document {root_doc.id} is not a PDF, skipping rotation.",
@@ -481,12 +476,14 @@ def merge(
metadata_document_id: int | None = None,
delete_originals: bool = False,
archive_fallback: bool = False,
source_mode: SourceMode = SourceModeChoices.LATEST_VERSION,
user: User | None = None,
) -> Literal["OK"]:
logger.info(
f"Attempting to merge {len(doc_ids)} documents into a single document.",
)
qs = Document.objects.filter(id__in=doc_ids)
qs = Document.objects.select_related("root_document").filter(id__in=doc_ids)
docs_by_id = {doc.id: doc for doc in qs}
affected_docs: list[int] = []
import pikepdf

@@ -495,14 +492,20 @@ def merge(
handoff_asn: int | None = None
# use doc_ids to preserve order
for doc_id in doc_ids:
doc = qs.get(id=doc_id)
doc = docs_by_id.get(doc_id)
if doc is None:
continue
_, source_doc = _resolve_root_and_source_doc(
doc,
source_mode=source_mode,
)
try:
doc_path = (
doc.archive_path
source_doc.archive_path
if archive_fallback
and doc.mime_type != "application/pdf"
and doc.has_archive_version
else doc.source_path
and source_doc.mime_type != "application/pdf"
and source_doc.has_archive_version
else source_doc.source_path
)
with pikepdf.open(str(doc_path)) as pdf:
version = max(version, pdf.pdf_version)
@@ -584,18 +587,23 @@ def split(
pages: list[list[int]],
*,
delete_originals: bool = False,
source_mode: SourceMode = SourceModeChoices.LATEST_VERSION,
user: User | None = None,
) -> Literal["OK"]:
logger.info(
f"Attempting to split document {doc_ids[0]} into {len(pages)} documents",
)
doc = Document.objects.get(id=doc_ids[0])
doc = Document.objects.select_related("root_document").get(id=doc_ids[0])
_, source_doc = _resolve_root_and_source_doc(
doc,
source_mode=source_mode,
)
import pikepdf

consume_tasks = []

try:
with pikepdf.open(doc.source_path) as pdf:
with pikepdf.open(source_doc.source_path) as pdf:
for idx, split_doc in enumerate(pages):
dst: pikepdf.Pdf = pikepdf.new()
for page in split_doc:
@@ -659,25 +667,17 @@ def delete_pages(
doc_ids: list[int],
pages: list[int],
*,
source_mode: SourceMode = SourceModeChoices.LATEST_VERSION,
user: User | None = None,
) -> Literal["OK"]:
logger.info(
f"Attempting to delete pages {pages} from {len(doc_ids)} documents",
)
doc = Document.objects.select_related("root_document").get(id=doc_ids[0])
root_doc: Document
if doc.root_document_id is None or doc.root_document is None:
root_doc = doc
else:
root_doc = doc.root_document

source_doc = (
Document.objects.filter(Q(id=root_doc.id) | Q(root_document=root_doc))
.order_by("-id")
.first()
root_doc, source_doc = _resolve_root_and_source_doc(
doc,
source_mode=source_mode,
)
if source_doc is None:
source_doc = root_doc
pages = sorted(pages)  # sort pages to avoid index issues
import pikepdf

@@ -722,6 +722,7 @@ def edit_pdf(
delete_original: bool = False,
update_document: bool = False,
include_metadata: bool = True,
source_mode: SourceMode = SourceModeChoices.LATEST_VERSION,
user: User | None = None,
) -> Literal["OK"]:
"""
@@ -736,19 +737,10 @@ def edit_pdf(
f"Editing PDF of document {doc_ids[0]} with {len(operations)} operations",
)
doc = Document.objects.select_related("root_document").get(id=doc_ids[0])
root_doc: Document
if doc.root_document_id is None or doc.root_document is None:
root_doc = doc
else:
root_doc = doc.root_document

source_doc = (
Document.objects.filter(Q(id=root_doc.id) | Q(root_document=root_doc))
.order_by("-id")
.first()
root_doc, source_doc = _resolve_root_and_source_doc(
doc,
source_mode=source_mode,
)
if source_doc is None:
source_doc = root_doc
import pikepdf

pdf_docs: list[pikepdf.Pdf] = []
@@ -859,6 +851,7 @@ def remove_password(
update_document: bool = False,
delete_original: bool = False,
include_metadata: bool = True,
source_mode: SourceMode = SourceModeChoices.LATEST_VERSION,
user: User | None = None,
) -> Literal["OK"]:
"""
@@ -868,19 +861,10 @@ def remove_password(

for doc_id in doc_ids:
doc = Document.objects.select_related("root_document").get(id=doc_id)
root_doc: Document
if doc.root_document_id is None or doc.root_document is None:
root_doc = doc
else:
root_doc = doc.root_document

source_doc = (
Document.objects.filter(Q(id=root_doc.id) | Q(root_document=root_doc))
.order_by("-id")
.first()
root_doc, source_doc = _resolve_root_and_source_doc(
doc,
source_mode=source_mode,
)
if source_doc is None:
source_doc = root_doc
try:
logger.info(
f"Attempting password removal from document {doc_ids[0]}",
@@ -304,7 +304,7 @@ class PaperlessCommand(RichCommand):

Progress output is directed to stderr to match the convention that
progress bars are transient UI feedback, not command output. This
mirrors tqdm's default behavior and prevents progress bar rendering
mirrors the convention that progress bars are transient UI feedback and prevents progress bar rendering
from interfering with stdout-based assertions in tests or piped
command output.

@@ -17,6 +17,7 @@ class Command(PaperlessCommand):
"modified) after their initial import."
)

supports_progress_bar = True
supports_multiprocessing = True

def add_arguments(self, parser):
@@ -3,12 +3,10 @@ import json
import os
import shutil
import tempfile
from itertools import chain
from itertools import islice
from pathlib import Path
from typing import TYPE_CHECKING

import tqdm
from allauth.mfa.models import Authenticator
from allauth.socialaccount.models import SocialAccount
from allauth.socialaccount.models import SocialApp
@@ -19,7 +17,6 @@ from django.contrib.auth.models import Permission
from django.contrib.auth.models import User
from django.contrib.contenttypes.models import ContentType
from django.core import serializers
from django.core.management.base import BaseCommand
from django.core.management.base import CommandError
from django.core.serializers.json import DjangoJSONEncoder
from django.db import transaction
@@ -38,6 +35,7 @@ if settings.AUDIT_LOG_ENABLED:

from documents.file_handling import delete_empty_directories
from documents.file_handling import generate_filename
from documents.management.commands.base import PaperlessCommand
from documents.management.commands.mixins import CryptMixin
from documents.models import Correspondent
from documents.models import CustomField
@@ -81,14 +79,99 @@ def serialize_queryset_batched(
yield serializers.serialize("python", chunk)


class Command(CryptMixin, BaseCommand):
class StreamingManifestWriter:
"""Incrementally writes a JSON array to a file, one record at a time.

Writes to <target>.tmp first; on close(), optionally BLAKE2b-compares
with the existing file (--compare-json) and renames or discards accordingly.
On exception, discard() deletes the tmp file and leaves the original intact.
"""

def __init__(
self,
path: Path,
*,
compare_json: bool = False,
files_in_export_dir: "set[Path] | None" = None,
) -> None:
self._path = path.resolve()
self._tmp_path = self._path.with_suffix(self._path.suffix + ".tmp")
self._compare_json = compare_json
self._files_in_export_dir: set[Path] = (
files_in_export_dir if files_in_export_dir is not None else set()
)
self._file = None
self._first = True

def open(self) -> None:
self._path.parent.mkdir(parents=True, exist_ok=True)
self._file = self._tmp_path.open("w", encoding="utf-8")
self._file.write("[")
self._first = True

def write_record(self, record: dict) -> None:
if not self._first:
self._file.write(",\n")
else:
self._first = False
self._file.write(
json.dumps(record, cls=DjangoJSONEncoder, indent=2, ensure_ascii=False),
)

def write_batch(self, records: list[dict]) -> None:
for record in records:
self.write_record(record)

def close(self) -> None:
if self._file is None:
return
self._file.write("\n]")
self._file.close()
self._file = None
self._finalize()

def discard(self) -> None:
if self._file is not None:
self._file.close()
self._file = None
if self._tmp_path.exists():
self._tmp_path.unlink()

def _finalize(self) -> None:
"""Compare with existing file (if --compare-json) then rename or discard tmp."""
if self._path in self._files_in_export_dir:
self._files_in_export_dir.remove(self._path)
if self._compare_json:
existing_hash = hashlib.blake2b(self._path.read_bytes()).hexdigest()
new_hash = hashlib.blake2b(self._tmp_path.read_bytes()).hexdigest()
if existing_hash == new_hash:
self._tmp_path.unlink()
return
self._tmp_path.rename(self._path)

def __enter__(self) -> "StreamingManifestWriter":
self.open()
return self

def __exit__(self, exc_type, exc_val, exc_tb) -> None:
if exc_type is not None:
self.discard()
else:
self.close()

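For orientation, here is how the exporter drives the writer defined above; the target path and records below are invented for illustration:

# Hypothetical usage of StreamingManifestWriter; path and records are made up.
from pathlib import Path

records = [{"model": "documents.tag", "pk": 1, "fields": {"name": "inbox"}}]
with StreamingManifestWriter(Path("/tmp/export/manifest.json")) as writer:
    for record in records:
        writer.write_record(record)  # streams one JSON array element at a time
# A clean exit renames manifest.json.tmp over manifest.json; an exception
# triggers discard(), deleting the tmp file and leaving the old manifest intact.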
class Command(CryptMixin, PaperlessCommand):
help = (
"Decrypt and rename all files in our collection into a given target "
"directory. And include a manifest file containing document data for "
"easy import."
)

supports_progress_bar = True
supports_multiprocessing = False

def add_arguments(self, parser) -> None:
super().add_arguments(parser)
parser.add_argument("target")

parser.add_argument(
@@ -195,13 +278,6 @@ class Command(CryptMixin, BaseCommand):
help="If set, only the database will be imported, not files",
)

parser.add_argument(
"--no-progress-bar",
default=False,
action="store_true",
help="If set, the progress bar will not be shown",
)

parser.add_argument(
"--passphrase",
help="If provided, is used to encrypt sensitive data in the export",
@@ -230,7 +306,6 @@ class Command(CryptMixin, BaseCommand):
self.no_thumbnail: bool = options["no_thumbnail"]
self.zip_export: bool = options["zip"]
self.data_only: bool = options["data_only"]
self.no_progress_bar: bool = options["no_progress_bar"]
self.passphrase: str | None = options.get("passphrase")
self.batch_size: int = options["batch_size"]

@@ -322,95 +397,85 @@ class Command(CryptMixin, BaseCommand):
if settings.AUDIT_LOG_ENABLED:
manifest_key_to_object_query["log_entries"] = LogEntry.objects.all()

with transaction.atomic():
manifest_dict = {}

# Build an overall manifest
for key, object_query in manifest_key_to_object_query.items():
manifest_dict[key] = list(
chain.from_iterable(
serialize_queryset_batched(
object_query,
batch_size=self.batch_size,
),
),
)

self.encrypt_secret_fields(manifest_dict)

# These are treated specially and included in the per-document manifest
# if that setting is enabled. Otherwise, they are just exported to the bulk
# manifest
document_map: dict[int, Document] = {
d.pk: d for d in manifest_key_to_object_query["documents"]
}
document_manifest = manifest_dict["documents"]

# 3. Export files from each document
for index, document_dict in tqdm.tqdm(
enumerate(document_manifest),
total=len(document_manifest),
disable=self.no_progress_bar,
):
document = document_map[document_dict["pk"]]

# 3.1. generate a unique filename
base_name = self.generate_base_name(document)

# 3.2. write filenames into manifest
original_target, thumbnail_target, archive_target = (
self.generate_document_targets(document, base_name, document_dict)
# Crypto setup before streaming begins
if self.passphrase:
self.setup_crypto(passphrase=self.passphrase)
elif MailAccount.objects.count() > 0 or SocialToken.objects.count() > 0:
self.stdout.write(
self.style.NOTICE(
"No passphrase was given, sensitive fields will be in plaintext",
),
)

# 3.3. write files to target folder
if not self.data_only:
self.copy_document_files(
document,
original_target,
thumbnail_target,
archive_target,
)

if self.split_manifest:
manifest_name = base_name.with_name(f"{base_name.stem}-manifest.json")
if self.use_folder_prefix:
manifest_name = Path("json") / manifest_name
manifest_name = (self.target / manifest_name).resolve()
manifest_name.parent.mkdir(parents=True, exist_ok=True)
content = [document_manifest[index]]
content += list(
filter(
lambda d: d["fields"]["document"] == document_dict["pk"],
manifest_dict["notes"],
),
)
content += list(
filter(
lambda d: d["fields"]["document"] == document_dict["pk"],
manifest_dict["custom_field_instances"],
),
)

self.check_and_write_json(
content,
manifest_name,
)

# These were exported already
if self.split_manifest:
del manifest_dict["documents"]
del manifest_dict["notes"]
del manifest_dict["custom_field_instances"]

# 4.1 write primary manifest to target folder
manifest = []
for key, item in manifest_dict.items():
manifest.extend(item)
document_manifest: list[dict] = []
manifest_path = (self.target / "manifest.json").resolve()
self.check_and_write_json(
manifest,

with StreamingManifestWriter(
manifest_path,
)
compare_json=self.compare_json,
files_in_export_dir=self.files_in_export_dir,
) as writer:
with transaction.atomic():
for key, qs in manifest_key_to_object_query.items():
if key == "documents":
# Accumulate for file-copy loop; written to manifest after
for batch in serialize_queryset_batched(
qs,
batch_size=self.batch_size,
):
for record in batch:
self._encrypt_record_inline(record)
document_manifest.extend(batch)
elif self.split_manifest and key in (
"notes",
"custom_field_instances",
):
# Written per-document in _write_split_manifest
pass
else:
for batch in serialize_queryset_batched(
qs,
batch_size=self.batch_size,
):
for record in batch:
self._encrypt_record_inline(record)
writer.write_batch(batch)

document_map: dict[int, Document] = {
d.pk: d for d in Document.objects.order_by("id")
}

# 3. Export files from each document
for index, document_dict in enumerate(
self.track(
document_manifest,
description="Exporting documents...",
total=len(document_manifest),
),
):
document = document_map[document_dict["pk"]]

# 3.1. generate a unique filename
base_name = self.generate_base_name(document)

# 3.2. write filenames into manifest
original_target, thumbnail_target, archive_target = (
self.generate_document_targets(document, base_name, document_dict)
)

# 3.3. write files to target folder
if not self.data_only:
self.copy_document_files(
document,
original_target,
thumbnail_target,
archive_target,
)

if self.split_manifest:
self._write_split_manifest(document_dict, document, base_name)
else:
writer.write_record(document_dict)

# 4.2 write version information to target folder
extra_metadata_path = (self.target / "metadata.json").resolve()
@@ -532,6 +597,42 @@ class Command(CryptMixin, BaseCommand):
archive_target,
)

def _encrypt_record_inline(self, record: dict) -> None:
"""Encrypt sensitive fields in a single record, if passphrase is set."""
if not self.passphrase:
return
fields = self.CRYPT_FIELDS_BY_MODEL.get(record.get("model", ""))
if fields:
for field in fields:
if record["fields"].get(field):
record["fields"][field] = self.encrypt_string(
value=record["fields"][field],
)

def _write_split_manifest(
self,
document_dict: dict,
document: Document,
base_name: Path,
) -> None:
"""Write per-document manifest file for --split-manifest mode."""
content = [document_dict]
content.extend(
serializers.serialize("python", Note.objects.filter(document=document)),
)
content.extend(
serializers.serialize(
"python",
CustomFieldInstance.objects.filter(document=document),
),
)
manifest_name = base_name.with_name(f"{base_name.stem}-manifest.json")
if self.use_folder_prefix:
manifest_name = Path("json") / manifest_name
manifest_name = (self.target / manifest_name).resolve()
manifest_name.parent.mkdir(parents=True, exist_ok=True)
self.check_and_write_json(content, manifest_name)

def check_and_write_json(
self,
content: list[dict] | dict,
@@ -549,14 +650,14 @@ class Command(CryptMixin, BaseCommand):
if target in self.files_in_export_dir:
self.files_in_export_dir.remove(target)
if self.compare_json:
target_checksum = hashlib.md5(target.read_bytes()).hexdigest()
target_checksum = hashlib.blake2b(target.read_bytes()).hexdigest()
src_str = json.dumps(
content,
cls=DjangoJSONEncoder,
indent=2,
ensure_ascii=False,
)
src_checksum = hashlib.md5(src_str.encode("utf-8")).hexdigest()
src_checksum = hashlib.blake2b(src_str.encode("utf-8")).hexdigest()
if src_checksum == target_checksum:
perform_write = False

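The checksum change above swaps MD5 for BLAKE2b, which ships in Python's hashlib. In isolation, the compare-before-write step amounts to the following sketch (the helper name is invented for illustration):

# Sketch: skip rewriting a file whose BLAKE2b digest already matches.
import hashlib
from pathlib import Path

def needs_write(target: Path, new_bytes: bytes) -> bool:
    if not target.exists():
        return True
    old_digest = hashlib.blake2b(target.read_bytes()).hexdigest()
    return old_digest != hashlib.blake2b(new_bytes).hexdigest()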
@@ -606,28 +707,3 @@ class Command(CryptMixin, BaseCommand):
if perform_copy:
target.parent.mkdir(parents=True, exist_ok=True)
copy_file_with_basic_stats(source, target)

def encrypt_secret_fields(self, manifest: dict) -> None:
"""
Encrypts certain fields in the export. Currently limited to the mail account password
"""

if self.passphrase:
self.setup_crypto(passphrase=self.passphrase)

for crypt_config in self.CRYPT_FIELDS:
exporter_key = crypt_config["exporter_key"]
crypt_fields = crypt_config["fields"]
for manifest_record in manifest[exporter_key]:
for field in crypt_fields:
if manifest_record["fields"][field]:
manifest_record["fields"][field] = self.encrypt_string(
value=manifest_record["fields"][field],
)

elif MailAccount.objects.count() > 0 or SocialToken.objects.count() > 0:
self.stdout.write(
self.style.NOTICE(
"No passphrase was given, sensitive fields will be in plaintext",
),
)

@@ -40,6 +40,7 @@ def _process_and_match(work: _WorkPackage) -> _WorkResult:
class Command(PaperlessCommand):
help = "Searches for documents where the content almost matches"

supports_progress_bar = True
supports_multiprocessing = True

def add_arguments(self, parser):

@@ -8,14 +8,13 @@ from pathlib import Path
from zipfile import ZipFile
from zipfile import is_zipfile

import tqdm
import ijson
from django.conf import settings
from django.contrib.auth.models import Permission
from django.contrib.auth.models import User
from django.contrib.contenttypes.models import ContentType
from django.core.exceptions import FieldDoesNotExist
from django.core.management import call_command
from django.core.management.base import BaseCommand
from django.core.management.base import CommandError
from django.core.serializers.base import DeserializationError
from django.db import IntegrityError
@@ -25,6 +24,7 @@ from django.db.models.signals import post_save
from filelock import FileLock

from documents.file_handling import create_source_path_directory
from documents.management.commands.base import PaperlessCommand
from documents.management.commands.mixins import CryptMixin
from documents.models import Correspondent
from documents.models import CustomField
@@ -47,6 +47,15 @@ if settings.AUDIT_LOG_ENABLED:
from auditlog.registry import auditlog


def iter_manifest_records(path: Path) -> Generator[dict, None, None]:
"""Yield records one at a time from a manifest JSON array via ijson."""
try:
with path.open("rb") as f:
yield from ijson.items(f, "item")
except ijson.JSONError as e:
raise CommandError(f"Failed to parse manifest file {path}: {e}") from e

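ijson.items(f, "item") parses the file incrementally, so a large manifest never has to be loaded whole. A standalone sketch of the same pattern, with an invented file name:

# Sketch: stream top-level array elements from a manifest without json.load().
import ijson

with open("manifest.json", "rb") as f:
    for record in ijson.items(f, "item"):  # one array element at a time
        if record["model"] == "documents.document":
            pass  # handle the record, then let it be garbage collected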
@contextmanager
def disable_signal(sig, receiver, sender, *, weak: bool | None = None) -> Generator:
try:
@@ -57,21 +66,18 @@ def disable_signal(sig, receiver, sender, *, weak: bool | None = None) -> Genera
sig.connect(receiver=receiver, sender=sender, **kwargs)


class Command(CryptMixin, BaseCommand):
class Command(CryptMixin, PaperlessCommand):
help = (
"Using a manifest.json file, load the data from there, and import the "
"documents it refers to."
)

def add_arguments(self, parser) -> None:
parser.add_argument("source")
supports_progress_bar = True
supports_multiprocessing = False

parser.add_argument(
"--no-progress-bar",
default=False,
action="store_true",
help="If set, the progress bar will not be shown",
)
def add_arguments(self, parser) -> None:
super().add_arguments(parser)
parser.add_argument("source")

parser.add_argument(
"--data-only",
@@ -147,14 +153,9 @@ class Command(CryptMixin, BaseCommand):
Loads manifest data from the various JSON files for parsing and loading the database
"""
main_manifest_path: Path = self.source / "manifest.json"

with main_manifest_path.open() as infile:
self.manifest = json.load(infile)
self.manifest_paths.append(main_manifest_path)

for file in Path(self.source).glob("**/*-manifest.json"):
with file.open() as infile:
self.manifest += json.load(infile)
self.manifest_paths.append(file)

def load_metadata(self) -> None:
@@ -231,12 +232,10 @@ class Command(CryptMixin, BaseCommand):

self.source = Path(options["source"]).resolve()
self.data_only: bool = options["data_only"]
self.no_progress_bar: bool = options["no_progress_bar"]
self.passphrase: str | None = options.get("passphrase")
self.version: str | None = None
self.salt: str | None = None
self.manifest_paths = []
self.manifest = []

# Create a temporary directory for extracting a zip file into it, even if supplied source is no zip file to keep code cleaner.
with tempfile.TemporaryDirectory() as tmp_dir:
@@ -296,6 +295,9 @@ class Command(CryptMixin, BaseCommand):
else:
self.stdout.write(self.style.NOTICE("Data only import completed"))

for tmp in getattr(self, "_decrypted_tmp_paths", []):
tmp.unlink(missing_ok=True)

self.stdout.write("Updating search index...")
call_command(
"document_index",
@@ -348,11 +350,12 @@ class Command(CryptMixin, BaseCommand):
) from e

self.stdout.write("Checking the manifest")
for record in self.manifest:
# Only check if the document files exist if this is not data only
# We don't care about documents for a data only import
if not self.data_only and record["model"] == "documents.document":
check_document_validity(record)
for manifest_path in self.manifest_paths:
for record in iter_manifest_records(manifest_path):
# Only check if the document files exist if this is not data only
# We don't care about documents for a data only import
if not self.data_only and record["model"] == "documents.document":
check_document_validity(record)

def _import_files_from_manifest(self) -> None:
settings.ORIGINALS_DIR.mkdir(parents=True, exist_ok=True)
@@ -361,23 +364,31 @@ class Command(CryptMixin, BaseCommand):

self.stdout.write("Copy files into paperless...")

manifest_documents = list(
filter(lambda r: r["model"] == "documents.document", self.manifest),
)
document_records = [
{
"pk": record["pk"],
EXPORTER_FILE_NAME: record[EXPORTER_FILE_NAME],
EXPORTER_THUMBNAIL_NAME: record.get(EXPORTER_THUMBNAIL_NAME),
EXPORTER_ARCHIVE_NAME: record.get(EXPORTER_ARCHIVE_NAME),
}
for manifest_path in self.manifest_paths
for record in iter_manifest_records(manifest_path)
if record["model"] == "documents.document"
]

for record in tqdm.tqdm(manifest_documents, disable=self.no_progress_bar):
for record in self.track(document_records, description="Copying files..."):
document = Document.objects.get(pk=record["pk"])

doc_file = record[EXPORTER_FILE_NAME]
document_path = self.source / doc_file

if EXPORTER_THUMBNAIL_NAME in record:
if record[EXPORTER_THUMBNAIL_NAME]:
thumb_file = record[EXPORTER_THUMBNAIL_NAME]
thumbnail_path = (self.source / thumb_file).resolve()
else:
thumbnail_path = None

if EXPORTER_ARCHIVE_NAME in record:
if record[EXPORTER_ARCHIVE_NAME]:
archive_file = record[EXPORTER_ARCHIVE_NAME]
archive_path = self.source / archive_file
else:
@@ -418,33 +429,43 @@ class Command(CryptMixin, BaseCommand):

document.save()

def _decrypt_record_if_needed(self, record: dict) -> dict:
fields = self.CRYPT_FIELDS_BY_MODEL.get(record.get("model", ""))
if fields:
for field in fields:
if record["fields"].get(field):
record["fields"][field] = self.decrypt_string(
value=record["fields"][field],
)
return record

def decrypt_secret_fields(self) -> None:
"""
The converse decryption of some fields out of the export before importing to database
The converse decryption of some fields out of the export before importing to database.
Streams records from each manifest path and writes decrypted content to a temp file.
"""
if self.passphrase:
# Salt has been loaded from metadata.json at this point, so it cannot be None
self.setup_crypto(passphrase=self.passphrase, salt=self.salt)

had_at_least_one_record = False

for crypt_config in self.CRYPT_FIELDS:
importer_model: str = crypt_config["model_name"]
crypt_fields: str = crypt_config["fields"]
for record in filter(
lambda x: x["model"] == importer_model,
self.manifest,
):
had_at_least_one_record = True
for field in crypt_fields:
if record["fields"][field]:
record["fields"][field] = self.decrypt_string(
value=record["fields"][field],
)

if had_at_least_one_record:
# It's annoying, but the DB is loaded from the JSON directly
# Maybe could change that in the future?
(self.source / "manifest.json").write_text(
json.dumps(self.manifest, indent=2, ensure_ascii=False),
)
if not self.passphrase:
return
# Salt has been loaded from metadata.json at this point, so it cannot be None
self.setup_crypto(passphrase=self.passphrase, salt=self.salt)
self._decrypted_tmp_paths: list[Path] = []
new_paths: list[Path] = []
for manifest_path in self.manifest_paths:
tmp = manifest_path.with_name(manifest_path.stem + ".decrypted.json")
with tmp.open("w", encoding="utf-8") as out:
out.write("[\n")
first = True
for record in iter_manifest_records(manifest_path):
if not first:
out.write(",\n")
json.dump(
self._decrypt_record_if_needed(record),
out,
indent=2,
ensure_ascii=False,
)
first = False
out.write("\n]\n")
self._decrypted_tmp_paths.append(tmp)
new_paths.append(tmp)
self.manifest_paths = new_paths

@@ -8,6 +8,9 @@ from documents.tasks import index_reindex
class Command(PaperlessCommand):
help = "Manages the document index."

supports_progress_bar = True
supports_multiprocessing = False

def add_arguments(self, parser):
super().add_arguments(parser)
parser.add_argument("command", choices=["reindex", "optimize"])

@@ -7,6 +7,9 @@ from documents.tasks import llmindex_index
class Command(PaperlessCommand):
help = "Manages the LLM-based vector index for Paperless."

supports_progress_bar = True
supports_multiprocessing = False

def add_arguments(self, parser: Any) -> None:
super().add_arguments(parser)
parser.add_argument("command", choices=["rebuild", "update"])

@@ -7,6 +7,9 @@ from documents.models import Document
class Command(PaperlessCommand):
help = "Rename all documents"

supports_progress_bar = True
supports_multiprocessing = False

def handle(self, *args, **options):
for document in self.track(Document.objects.all(), description="Renaming..."):
post_save.send(Document, instance=document, created=False)

@@ -180,6 +180,9 @@ class Command(PaperlessCommand):
"modified) after their initial import."
)

supports_progress_bar = True
supports_multiprocessing = False

def add_arguments(self, parser) -> None:
super().add_arguments(parser)
parser.add_argument("-c", "--correspondent", default=False, action="store_true")

@@ -24,6 +24,9 @@ _LEVEL_STYLE: dict[int, tuple[str, str]] = {
class Command(PaperlessCommand):
help = "This command checks your document archive for issues."

supports_progress_bar = True
supports_multiprocessing = False

def _render_results(self, messages: SanityCheckMessages) -> None:
"""Render sanity check results as a Rich table."""


@@ -36,6 +36,7 @@ def _process_document(doc_id: int) -> None:
class Command(PaperlessCommand):
help = "This will regenerate the thumbnails for all documents."

supports_progress_bar = True
supports_multiprocessing = True

def add_arguments(self, parser) -> None:

@@ -1,6 +1,5 @@
import base64
import os
from argparse import ArgumentParser
from typing import TypedDict

from cryptography.fernet import Fernet
@@ -21,25 +20,6 @@ class CryptFields(TypedDict):
fields: list[str]


class ProgressBarMixin:
"""
Many commands use a progress bar, which can be disabled
via this class
"""

def add_argument_progress_bar_mixin(self, parser: ArgumentParser) -> None:
parser.add_argument(
"--no-progress-bar",
default=False,
action="store_true",
help="If set, the progress bar will not be shown",
)

def handle_progress_bar_mixin(self, *args, **options) -> None:
self.no_progress_bar = options["no_progress_bar"]
self.use_progress_bar = not self.no_progress_bar


class CryptMixin:
"""
Fully based on:
@@ -71,7 +51,7 @@ class CryptMixin:
key_size = 32
kdf_algorithm = "pbkdf2_sha256"

CRYPT_FIELDS: CryptFields = [
CRYPT_FIELDS: list[CryptFields] = [
{
"exporter_key": "mail_accounts",
"model_name": "paperless_mail.mailaccount",
@@ -89,6 +69,10 @@ class CryptMixin:
],
},
]
# O(1) lookup for per-record encryption; derived from CRYPT_FIELDS at class definition time
CRYPT_FIELDS_BY_MODEL: dict[str, list[str]] = {
cfg["model_name"]: cfg["fields"] for cfg in CRYPT_FIELDS
}

def get_crypt_params(self) -> dict[str, dict[str, str | int]]:
return {

@@ -9,6 +9,9 @@ class Command(PaperlessCommand):

help = "Prunes the audit logs of objects that no longer exist."

supports_progress_bar = True
supports_multiprocessing = False

def handle(self, *args, **options):
with transaction.atomic():
for log_entry in self.track(

@@ -169,7 +169,7 @@ def match_storage_paths(document: Document, classifier: DocumentClassifier, user
def matches(matching_model: MatchingModel, document: Document):
search_flags = 0

document_content = document.content
document_content = document.get_effective_content() or ""

# Check that match is not empty
if not matching_model.match.strip():

@@ -361,6 +361,42 @@ class Document(SoftDeleteModel, ModelWithOwner):  # type: ignore[django-manager-
res += f" {self.title}"
return res

def get_effective_content(self) -> str | None:
"""
Returns the effective content for the document.

For root documents, this is the latest version's content when available.
For version documents, this is always the document's own content.
If the queryset already annotated ``effective_content``, that value is used.
"""
if hasattr(self, "effective_content"):
return getattr(self, "effective_content")

if self.root_document_id is not None or self.pk is None:
return self.content

prefetched_cache = getattr(self, "_prefetched_objects_cache", None)
prefetched_versions = (
prefetched_cache.get("versions")
if isinstance(prefetched_cache, dict)
else None
)
if prefetched_versions:
latest_prefetched = max(prefetched_versions, key=lambda doc: doc.id)
return latest_prefetched.content

latest_version_content = (
Document.objects.filter(root_document=self)
.order_by("-id")
.values_list("content", flat=True)
.first()
)
return (
latest_version_content
if latest_version_content is not None
else self.content
)

@property
def suggestion_content(self):
"""
@@ -373,15 +409,21 @@ class Document(SoftDeleteModel, ModelWithOwner):  # type: ignore[django-manager-
This improves processing speed for large documents while keeping
enough context for accurate suggestions.
"""
if not self.content or len(self.content) <= 1200000:
return self.content
effective_content = self.get_effective_content()
if not effective_content or len(effective_content) <= 1200000:
return effective_content
else:
# Use 80% from the start and 20% from the end
# to preserve both opening and closing context.
head_len = 800000
tail_len = 200000

return " ".join((self.content[:head_len], self.content[-tail_len:]))
return " ".join(
(
effective_content[:head_len],
effective_content[-tail_len:],
),
)

@property
def source_path(self) -> Path:

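The lookup order get_effective_content() applies can be summarized as a three-step fallback. This is an editor's sketch over plain attributes; newest_version_of is a hypothetical stand-in for the queryset lookup above, not a function from this diff:

# Sketch of the fallback order, assuming a hypothetical newest_version_of().
def effective_content(doc):
    if hasattr(doc, "effective_content"):     # 1. a queryset annotation wins
        return doc.effective_content
    if doc.root_document_id is not None:      # 2. versions keep their own text
        return doc.content
    latest = newest_version_of(doc)           # 3. roots defer to the newest version
    return latest.content if latest is not None else doc.content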
@@ -1440,6 +1440,124 @@ class SavedViewSerializer(OwnedObjectSerializer):
"set_permissions",
]

def _get_api_version(self) -> int:
request = self.context.get("request")
return int(
request.version if request else settings.REST_FRAMEWORK["DEFAULT_VERSION"],
)

def _update_legacy_visibility_preferences(
self,
saved_view_id: int,
*,
show_on_dashboard: bool | None,
show_in_sidebar: bool | None,
) -> UiSettings | None:
if show_on_dashboard is None and show_in_sidebar is None:
return None

request = self.context.get("request")
user = request.user if request else self.user
if user is None:
return None

ui_settings, _ = UiSettings.objects.get_or_create(
user=user,
defaults={"settings": {}},
)
current_settings = (
ui_settings.settings if isinstance(ui_settings.settings, dict) else {}
)
current_settings = dict(current_settings)

saved_views_settings = current_settings.get("saved_views")
if isinstance(saved_views_settings, dict):
saved_views_settings = dict(saved_views_settings)
else:
saved_views_settings = {}

dashboard_ids = {
int(raw_id)
for raw_id in saved_views_settings.get("dashboard_views_visible_ids", [])
if str(raw_id).isdigit()
}
sidebar_ids = {
int(raw_id)
for raw_id in saved_views_settings.get("sidebar_views_visible_ids", [])
if str(raw_id).isdigit()
}

if show_on_dashboard is not None:
if show_on_dashboard:
dashboard_ids.add(saved_view_id)
else:
dashboard_ids.discard(saved_view_id)
if show_in_sidebar is not None:
if show_in_sidebar:
sidebar_ids.add(saved_view_id)
else:
sidebar_ids.discard(saved_view_id)

saved_views_settings["dashboard_views_visible_ids"] = sorted(dashboard_ids)
saved_views_settings["sidebar_views_visible_ids"] = sorted(sidebar_ids)
current_settings["saved_views"] = saved_views_settings
ui_settings.settings = current_settings
ui_settings.save(update_fields=["settings"])
return ui_settings

def to_representation(self, instance):
# TODO: remove this and related backwards compatibility code when API v9 is dropped
ret = super().to_representation(instance)
request = self.context.get("request")
api_version = self._get_api_version()

if api_version < 10:
dashboard_ids = set()
sidebar_ids = set()
user = request.user if request else None
if user is not None and hasattr(user, "ui_settings"):
ui_settings = user.ui_settings.settings or None
saved_views = None
if isinstance(ui_settings, dict):
saved_views = ui_settings.get("saved_views", {})
if isinstance(saved_views, dict):
dashboard_ids = set(
saved_views.get("dashboard_views_visible_ids", []),
)
sidebar_ids = set(
saved_views.get("sidebar_views_visible_ids", []),
)
ret["show_on_dashboard"] = instance.id in dashboard_ids
ret["show_in_sidebar"] = instance.id in sidebar_ids

return ret

def to_internal_value(self, data):
# TODO: remove this and related backwards compatibility code when API v9 is dropped
api_version = self._get_api_version()
if api_version >= 10:
return super().to_internal_value(data)

normalized_data = data.copy()
legacy_visibility_fields = {}
boolean_field = serializers.BooleanField()

for field_name in ("show_on_dashboard", "show_in_sidebar"):
if field_name in normalized_data:
try:
legacy_visibility_fields[field_name] = (
boolean_field.to_internal_value(
normalized_data.get(field_name),
)
)
except serializers.ValidationError as exc:
raise serializers.ValidationError({field_name: exc.detail})
del normalized_data[field_name]

ret = super().to_internal_value(normalized_data)
ret.update(legacy_visibility_fields)
return ret

def validate(self, attrs):
attrs = super().validate(attrs)
if "display_fields" in attrs and attrs["display_fields"] is not None:
@@ -1459,6 +1577,9 @@ class SavedViewSerializer(OwnedObjectSerializer):
return attrs

def update(self, instance, validated_data):
request = self.context.get("request")
show_on_dashboard = validated_data.pop("show_on_dashboard", None)
show_in_sidebar = validated_data.pop("show_in_sidebar", None)
if "filter_rules" in validated_data:
rules_data = validated_data.pop("filter_rules")
else:
@@ -1480,9 +1601,19 @@ class SavedViewSerializer(OwnedObjectSerializer):
SavedViewFilterRule.objects.filter(saved_view=instance).delete()
for rule_data in rules_data:
SavedViewFilterRule.objects.create(saved_view=instance, **rule_data)
ui_settings = self._update_legacy_visibility_preferences(
instance.id,
show_on_dashboard=show_on_dashboard,
show_in_sidebar=show_in_sidebar,
)
if request is not None and ui_settings is not None:
request.user.ui_settings = ui_settings
return instance

def create(self, validated_data):
request = self.context.get("request")
show_on_dashboard = validated_data.pop("show_on_dashboard", None)
show_in_sidebar = validated_data.pop("show_in_sidebar", None)
rules_data = validated_data.pop("filter_rules")
if "user" in validated_data:
# backwards compatibility
@@ -1490,6 +1621,13 @@ class SavedViewSerializer(OwnedObjectSerializer):
saved_view = super().create(validated_data)
for rule_data in rules_data:
SavedViewFilterRule.objects.create(saved_view=saved_view, **rule_data)
ui_settings = self._update_legacy_visibility_preferences(
saved_view.id,
show_on_dashboard=show_on_dashboard,
show_in_sidebar=show_in_sidebar,
)
if request is not None and ui_settings is not None:
request.user.ui_settings = ui_settings
return saved_view


@@ -1723,6 +1861,15 @@ class BulkEditSerializer(
except ValueError:
raise serializers.ValidationError("invalid rotation degrees")

def _validate_source_mode(self, parameters) -> None:
source_mode = parameters.get(
"source_mode",
bulk_edit.SourceModeChoices.LATEST_VERSION,
)
if source_mode not in bulk_edit.SourceModeChoices.__dict__.values():
raise serializers.ValidationError("Invalid source_mode")
parameters["source_mode"] = source_mode

def _validate_parameters_split(self, parameters) -> None:
if "pages" not in parameters:
raise serializers.ValidationError("pages not specified")
@@ -1823,6 +1970,9 @@ class BulkEditSerializer(
method = attrs["method"]
parameters = attrs["parameters"]

if "source_mode" in parameters:
self._validate_source_mode(parameters)

if method == bulk_edit.set_correspondent:
self._validate_parameters_correspondent(parameters)
elif method == bulk_edit.set_document_type:

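For reference, a request body accepted by the validation above might look like the following; the values are illustrative, and the shape matches what the tests below exercise:

# Example bulk_edit payload using the new source_mode parameter.
payload = {
    "documents": [42],
    "method": "edit_pdf",
    "parameters": {
        "operations": [{"page": 1}],
        "source_mode": "explicit_selection",  # or "latest_version" (the default)
    },
}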
@@ -1395,7 +1395,10 @@ class TestBulkEditAPI(DirectoriesMixin, APITestCase):
{
"documents": [self.doc2.id],
"method": "edit_pdf",
"parameters": {"operations": [{"page": 1}]},
"parameters": {
"operations": [{"page": 1}],
"source_mode": "explicit_selection",
},
},
),
content_type="application/json",
@@ -1407,6 +1410,7 @@ class TestBulkEditAPI(DirectoriesMixin, APITestCase):
args, kwargs = m.call_args
self.assertCountEqual(args[0], [self.doc2.id])
self.assertEqual(kwargs["operations"], [{"page": 1}])
self.assertEqual(kwargs["source_mode"], "explicit_selection")
self.assertEqual(kwargs["user"], self.user)

def test_edit_pdf_invalid_params(self) -> None:
@@ -1572,6 +1576,24 @@ class TestBulkEditAPI(DirectoriesMixin, APITestCase):
response.content,
)

# invalid source mode
response = self.client.post(
"/api/documents/bulk_edit/",
json.dumps(
{
"documents": [self.doc2.id],
"method": "edit_pdf",
"parameters": {
"operations": [{"page": 1}],
"source_mode": "not_a_mode",
},
},
),
content_type="application/json",
)
self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST)
self.assertIn(b"Invalid source_mode", response.content)

@mock.patch("documents.serialisers.bulk_edit.edit_pdf")
def test_edit_pdf_page_out_of_bounds(self, m) -> None:
"""

@@ -41,6 +41,7 @@ from documents.models import SavedView
from documents.models import ShareLink
from documents.models import StoragePath
from documents.models import Tag
from documents.models import UiSettings
from documents.models import Workflow
from documents.models import WorkflowAction
from documents.models import WorkflowTrigger
@@ -2200,6 +2201,205 @@ class TestDocumentApi(DirectoriesMixin, DocumentConsumeDelayMixin, APITestCase):
self.assertEqual(response.status_code, status.HTTP_200_OK)
self.assertEqual(response.data["count"], 0)

def test_saved_view_api_version_backward_compatibility(self) -> None:
"""
GIVEN:
- Saved views and UiSettings with visibility preferences
WHEN:
- API request with version=9 (legacy)
- API request with version=10 (current)
THEN:
- Version 9 returns show_on_dashboard and show_in_sidebar from UiSettings
- Version 10 omits these fields (moved to UiSettings)
"""
v1 = SavedView.objects.create(
owner=self.user,
name="dashboard_view",
sort_field="created",
)
v2 = SavedView.objects.create(
owner=self.user,
name="sidebar_view",
sort_field="created",
)
v3 = SavedView.objects.create(
owner=self.user,
name="hidden_view",
sort_field="created",
)

UiSettings.objects.update_or_create(
user=self.user,
defaults={
"settings": {
"saved_views": {
"dashboard_views_visible_ids": [v1.id],
"sidebar_views_visible_ids": [v2.id],
},
},
},
)

response_v9 = self.client.get(
"/api/saved_views/",
headers={"Accept": "application/json; version=9"},
format="json",
)
self.assertEqual(response_v9.status_code, status.HTTP_200_OK)
results_v9 = {r["id"]: r for r in response_v9.data["results"]}
self.assertIn("show_on_dashboard", results_v9[v1.id])
self.assertIn("show_in_sidebar", results_v9[v1.id])
self.assertTrue(results_v9[v1.id]["show_on_dashboard"])
self.assertFalse(results_v9[v1.id]["show_in_sidebar"])
self.assertTrue(results_v9[v2.id]["show_in_sidebar"])
self.assertFalse(results_v9[v2.id]["show_on_dashboard"])
self.assertFalse(results_v9[v3.id]["show_on_dashboard"])
self.assertFalse(results_v9[v3.id]["show_in_sidebar"])

response_v10 = self.client.get(
"/api/saved_views/",
headers={"Accept": "application/json; version=10"},
format="json",
)
self.assertEqual(response_v10.status_code, status.HTTP_200_OK)
results_v10 = {r["id"]: r for r in response_v10.data["results"]}
self.assertNotIn("show_on_dashboard", results_v10[v1.id])
self.assertNotIn("show_in_sidebar", results_v10[v1.id])

def test_saved_view_api_version_9_user_without_ui_settings(self) -> None:
"""
GIVEN:
- User with no UiSettings and a saved view
WHEN:
- API request with version=9
THEN:
- show_on_dashboard and show_in_sidebar are False (default)
"""
SavedView.objects.create(
owner=self.user,
name="test_view",
sort_field="created",
)
UiSettings.objects.filter(user=self.user).delete()

response = self.client.get(
"/api/saved_views/",
headers={"Accept": "application/json; version=9"},
format="json",
)
self.assertEqual(response.status_code, status.HTTP_200_OK)
result = response.data["results"][0]
self.assertFalse(result["show_on_dashboard"])
self.assertFalse(result["show_in_sidebar"])

def test_saved_view_api_version_9_create_writes_visibility_to_ui_settings(
self,
) -> None:
"""
GIVEN:
- No UiSettings for the current user
WHEN:
- A saved view is created through API version 9 with visibility flags
THEN:
- Visibility is persisted in UiSettings.saved_views
"""
UiSettings.objects.filter(user=self.user).delete()

response = self.client.post(
"/api/saved_views/",
{
"name": "legacy-v9-create",
"sort_field": "created",
"filter_rules": [],
"show_on_dashboard": True,
"show_in_sidebar": False,
},
headers={"Accept": "application/json; version=9"},
format="json",
)
self.assertEqual(response.status_code, status.HTTP_201_CREATED)
self.assertTrue(response.data["show_on_dashboard"])
self.assertFalse(response.data["show_in_sidebar"])

self.user.refresh_from_db()
self.assertTrue(hasattr(self.user, "ui_settings"))
saved_view_settings = self.user.ui_settings.settings["saved_views"]
self.assertListEqual(
saved_view_settings["dashboard_views_visible_ids"],
[response.data["id"]],
)
self.assertListEqual(saved_view_settings["sidebar_views_visible_ids"], [])

def test_saved_view_api_version_9_patch_writes_visibility_to_ui_settings(
self,
) -> None:
"""
GIVEN:
- Existing saved views and UiSettings visibility ids
WHEN:
- A saved view is updated through API version 9 visibility flags
THEN:
- The per-user UiSettings visibility ids are updated
"""
v1 = SavedView.objects.create(
owner=self.user,
name="legacy-v9-patch-1",
sort_field="created",
)
v2 = SavedView.objects.create(
owner=self.user,
name="legacy-v9-patch-2",
sort_field="created",
)
UiSettings.objects.update_or_create(
user=self.user,
defaults={
"settings": {
"saved_views": {
"dashboard_views_visible_ids": [v1.id],
"sidebar_views_visible_ids": [v1.id, v2.id],
},
},
},
)

response = self.client.patch(
f"/api/saved_views/{v1.id}/",
{
"show_on_dashboard": False,
},
headers={"Accept": "application/json; version=9"},
format="json",
)
self.assertEqual(response.status_code, status.HTTP_200_OK)
self.assertFalse(response.data["show_on_dashboard"])
self.assertTrue(response.data["show_in_sidebar"])

self.user.refresh_from_db()
saved_view_settings = self.user.ui_settings.settings["saved_views"]
self.assertListEqual(saved_view_settings["dashboard_views_visible_ids"], [])
self.assertListEqual(
saved_view_settings["sidebar_views_visible_ids"],
[v1.id, v2.id],
)

response = self.client.patch(
f"/api/saved_views/{v1.id}/",
{
"show_in_sidebar": False,
},
headers={"Accept": "application/json; version=9"},
format="json",
)
self.assertEqual(response.status_code, status.HTTP_200_OK)
self.assertFalse(response.data["show_on_dashboard"])
self.assertFalse(response.data["show_in_sidebar"])

self.user.refresh_from_db()
saved_view_settings = self.user.ui_settings.settings["saved_views"]
self.assertListEqual(saved_view_settings["dashboard_views_visible_ids"], [])
self.assertListEqual(saved_view_settings["sidebar_views_visible_ids"], [v2.id])

def test_saved_view_create_update_patch(self) -> None:
User.objects.create_user("user1")


@@ -405,7 +405,9 @@ class TestBulkEdit(DirectoriesMixin, TestCase):
        self.assertTrue(Document.objects.filter(id=self.doc1.id).exists())
        self.assertFalse(Document.objects.filter(id=version.id).exists())

    def test_get_root_and_current_doc_mapping(self) -> None:
    def test_resolve_root_and_source_doc_latest_version_prefers_newest_version(
        self,
    ) -> None:
        version1 = Document.objects.create(
            checksum="B-v1",
            title="B version 1",
@@ -417,18 +419,14 @@ class TestBulkEdit(DirectoriesMixin, TestCase):
            root_document=self.doc2,
        )

        root_ids_by_doc_id = bulk_edit._get_root_ids_by_doc_id(
            [self.doc2.id, version1.id, version2.id],
        root_doc, source_doc = bulk_edit._resolve_root_and_source_doc(
            self.doc2,
            source_mode="latest_version",
        )
        self.assertEqual(root_ids_by_doc_id[self.doc2.id], self.doc2.id)
        self.assertEqual(root_ids_by_doc_id[version1.id], self.doc2.id)
        self.assertEqual(root_ids_by_doc_id[version2.id], self.doc2.id)

        root_docs, current_docs = bulk_edit._get_root_and_current_docs_by_root_id(
            {self.doc2.id},
        )
        self.assertEqual(root_docs[self.doc2.id].id, self.doc2.id)
        self.assertEqual(current_docs[self.doc2.id].id, version2.id)
        self.assertEqual(root_doc.id, self.doc2.id)
        self.assertEqual(source_doc.id, version2.id)
        self.assertNotEqual(source_doc.id, version1.id)

    @mock.patch("documents.tasks.bulk_update_documents.delay")
    def test_set_permissions(self, m) -> None:
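A minimal sketch of the contract the assertions above pin down; the real _resolve_root_and_source_doc in documents/bulk_edit.py is not shown in this diff, and the "versions" related name and the ordering by id are assumptions.

    def resolve_root_and_source_doc(doc, *, source_mode):
        # Hedged sketch: version rows point at their root via root_document.
        root = doc.root_document or doc
        if source_mode == "latest_version":
            # Pick the newest version attached to the root, if any exists.
            # (newest-by-id here; the real ordering may differ)
            latest = root.versions.order_by("-id").first()
            return root, latest if latest is not None else root
        # "explicit_selection": operate on exactly the document that was picked.
        return root, doc
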
@@ -662,6 +660,33 @@ class TestPDFActions(DirectoriesMixin, TestCase):

        self.assertEqual(result, "OK")

    @mock.patch("pikepdf.open")
    @mock.patch("documents.tasks.consume_file.s")
    def test_merge_uses_latest_version_source_for_root_selection(
        self,
        mock_consume_file,
        mock_open_pdf,
    ) -> None:
        version_file = self.dirs.scratch_dir / "sample2_version_merge.pdf"
        shutil.copy(self.doc2.source_path, version_file)
        version = Document.objects.create(
            checksum="B-v1",
            title="B version 1",
            root_document=self.doc2,
            filename=version_file,
            mime_type="application/pdf",
        )
        fake_pdf = mock.MagicMock()
        fake_pdf.pdf_version = "1.7"
        fake_pdf.pages = [mock.Mock()]
        mock_open_pdf.return_value.__enter__.return_value = fake_pdf

        result = bulk_edit.merge([self.doc2.id])

        self.assertEqual(result, "OK")
        mock_open_pdf.assert_called_once_with(str(version.source_path))
        mock_consume_file.assert_not_called()

    @mock.patch("documents.bulk_edit.delete.si")
    @mock.patch("documents.tasks.consume_file.s")
    def test_merge_and_delete_originals(
@@ -870,6 +895,36 @@ class TestPDFActions(DirectoriesMixin, TestCase):

        self.assertEqual(result, "OK")

    @mock.patch("documents.bulk_edit.group")
    @mock.patch("pikepdf.open")
    @mock.patch("documents.tasks.consume_file.s")
    def test_split_uses_latest_version_source_for_root_selection(
        self,
        mock_consume_file,
        mock_open_pdf,
        mock_group,
    ) -> None:
        version_file = self.dirs.scratch_dir / "sample2_version_split.pdf"
        shutil.copy(self.doc2.source_path, version_file)
        version = Document.objects.create(
            checksum="B-v1",
            title="B version 1",
            root_document=self.doc2,
            filename=version_file,
            mime_type="application/pdf",
        )
        fake_pdf = mock.MagicMock()
        fake_pdf.pages = [mock.Mock(), mock.Mock()]
        mock_open_pdf.return_value.__enter__.return_value = fake_pdf
        mock_group.return_value.delay.return_value = None

        result = bulk_edit.split([self.doc2.id], [[1], [2]])

        self.assertEqual(result, "OK")
        mock_open_pdf.assert_called_once_with(version.source_path)
        mock_consume_file.assert_not_called()
        mock_group.return_value.delay.assert_not_called()

    @mock.patch("documents.bulk_edit.delete.si")
    @mock.patch("documents.tasks.consume_file.s")
    @mock.patch("documents.bulk_edit.chord")
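Both tests above pin down the same rule: when a root document has newer versions, merge and split read the latest version's file rather than the root's original. The split test also shows the page-group argument shape, illustrated below with hypothetical values (one list of 1-based page numbers per resulting document; only the call shape is taken from the test).

    # Hedged usage sketch: split a 4-page document into a 1-page and a
    # 3-page output. "document" is a placeholder Document instance.
    result = bulk_edit.split(
        [document.id],     # documents to split (here a single root)
        [[1], [2, 3, 4]],  # one page-group per resulting document
    )
    assert result == "OK"
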
@@ -1041,6 +1096,34 @@ class TestPDFActions(DirectoriesMixin, TestCase):
        self.assertIsNotNone(overrides)
        self.assertEqual(result, "OK")

    @mock.patch("documents.data_models.magic.from_file", return_value="application/pdf")
    @mock.patch("documents.tasks.consume_file.delay")
    @mock.patch("pikepdf.open")
    def test_rotate_explicit_selection_uses_root_source_when_root_selected(
        self,
        mock_open,
        mock_consume_delay,
        mock_magic,
    ):
        Document.objects.create(
            checksum="B-v1",
            title="B version 1",
            root_document=self.doc2,
        )
        fake_pdf = mock.MagicMock()
        fake_pdf.pages = [mock.Mock()]
        mock_open.return_value.__enter__.return_value = fake_pdf

        result = bulk_edit.rotate(
            [self.doc2.id],
            90,
            source_mode="explicit_selection",
        )

        self.assertEqual(result, "OK")
        mock_open.assert_called_once_with(self.doc2.source_path)
        mock_consume_delay.assert_called_once()

    @mock.patch("documents.tasks.consume_file.delay")
    @mock.patch("pikepdf.Pdf.save")
    @mock.patch("documents.data_models.magic.from_file", return_value="application/pdf")
@@ -1065,6 +1148,34 @@ class TestPDFActions(DirectoriesMixin, TestCase):
        self.assertIsNotNone(overrides)
        self.assertEqual(result, "OK")

    @mock.patch("documents.data_models.magic.from_file", return_value="application/pdf")
    @mock.patch("documents.tasks.consume_file.delay")
    @mock.patch("pikepdf.open")
    def test_delete_pages_explicit_selection_uses_root_source_when_root_selected(
        self,
        mock_open,
        mock_consume_delay,
        mock_magic,
    ):
        Document.objects.create(
            checksum="B-v1",
            title="B version 1",
            root_document=self.doc2,
        )
        fake_pdf = mock.MagicMock()
        fake_pdf.pages = [mock.Mock(), mock.Mock()]
        mock_open.return_value.__enter__.return_value = fake_pdf

        result = bulk_edit.delete_pages(
            [self.doc2.id],
            [1],
            source_mode="explicit_selection",
        )

        self.assertEqual(result, "OK")
        mock_open.assert_called_once_with(self.doc2.source_path)
        mock_consume_delay.assert_called_once()

    @mock.patch("documents.tasks.consume_file.delay")
    @mock.patch("pikepdf.Pdf.save")
    def test_delete_pages_with_error(self, mock_pdf_save, mock_consume_delay):
@@ -1213,6 +1324,40 @@ class TestPDFActions(DirectoriesMixin, TestCase):
        self.assertTrue(str(consumable.original_file).endswith("_edited.pdf"))
        self.assertIsNotNone(overrides)

    @mock.patch("documents.data_models.magic.from_file", return_value="application/pdf")
    @mock.patch("documents.tasks.consume_file.delay")
    @mock.patch("pikepdf.new")
    @mock.patch("pikepdf.open")
    def test_edit_pdf_explicit_selection_uses_root_source_when_root_selected(
        self,
        mock_open,
        mock_new,
        mock_consume_delay,
        mock_magic,
    ):
        Document.objects.create(
            checksum="B-v1",
            title="B version 1",
            root_document=self.doc2,
        )
        fake_pdf = mock.MagicMock()
        fake_pdf.pages = [mock.Mock()]
        mock_open.return_value.__enter__.return_value = fake_pdf
        output_pdf = mock.MagicMock()
        output_pdf.pages = []
        mock_new.return_value = output_pdf

        result = bulk_edit.edit_pdf(
            [self.doc2.id],
            operations=[{"page": 1}],
            update_document=True,
            source_mode="explicit_selection",
        )

        self.assertEqual(result, "OK")
        mock_open.assert_called_once_with(self.doc2.source_path)
        mock_consume_delay.assert_called_once()

    @mock.patch("documents.bulk_edit.group")
    @mock.patch("documents.tasks.consume_file.s")
    def test_edit_pdf_without_metadata(
@@ -1333,6 +1478,34 @@ class TestPDFActions(DirectoriesMixin, TestCase):
        self.assertEqual(consumable.root_document_id, doc.id)
        self.assertIsNotNone(overrides)

    @mock.patch("documents.data_models.magic.from_file", return_value="application/pdf")
    @mock.patch("documents.tasks.consume_file.delay")
    @mock.patch("pikepdf.open")
    def test_remove_password_explicit_selection_uses_root_source_when_root_selected(
        self,
        mock_open,
        mock_consume_delay,
        mock_magic,
    ) -> None:
        Document.objects.create(
            checksum="A-v1",
            title="A version 1",
            root_document=self.doc1,
        )
        fake_pdf = mock.MagicMock()
        mock_open.return_value.__enter__.return_value = fake_pdf

        result = bulk_edit.remove_password(
            [self.doc1.id],
            password="secret",
            update_document=True,
            source_mode="explicit_selection",
        )

        self.assertEqual(result, "OK")
        mock_open.assert_called_once_with(self.doc1.source_path, password="secret")
        mock_consume_delay.assert_called_once()

    @mock.patch("documents.bulk_edit.chord")
    @mock.patch("documents.bulk_edit.group")
    @mock.patch("documents.tasks.consume_file.s")

@@ -156,6 +156,46 @@ class TestDocument(TestCase):
        )
        self.assertEqual(doc.get_public_filename(), "2020-12-25 test")

    def test_suggestion_content_uses_latest_version_content_for_root_documents(
        self,
    ) -> None:
        root = Document.objects.create(
            title="root",
            checksum="root",
            mime_type="application/pdf",
            content="outdated root content",
        )
        version = Document.objects.create(
            title="v1",
            checksum="v1",
            mime_type="application/pdf",
            root_document=root,
            content="latest version content",
        )

        self.assertEqual(root.suggestion_content, version.content)

    def test_content_length_is_per_document_row_for_versions(self) -> None:
        root = Document.objects.create(
            title="root",
            checksum="root",
            mime_type="application/pdf",
            content="abc",
        )
        version = Document.objects.create(
            title="v1",
            checksum="v1",
            mime_type="application/pdf",
            root_document=root,
            content="abcdefgh",
        )

        root.refresh_from_db()
        version.refresh_from_db()

        self.assertEqual(root.content_length, 3)
        self.assertEqual(version.content_length, 8)

    def test_suggestion_content(self) -> None:
        """

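Read together, the two tests above suggest Document.suggestion_content resolves to the newest version's text while content_length stays a per-row value. A hedged property sketch consistent with those assertions; the "versions" related name and newest-by-id ordering are assumptions, not taken from this diff.

    @property
    def suggestion_content(self):
        # Hedged sketch: root documents defer to their newest version's
        # content; documents without versions use their own content.
        latest = self.versions.order_by("-id").first()  # related name assumed
        return latest.content if latest is not None else self.content
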
@@ -753,6 +753,31 @@ class TestExportImport(
        call_command("document_importer", "--no-progress-bar", self.target)
        self.assertEqual(Document.objects.count(), 4)

    def test_folder_prefix_with_split(self) -> None:
        """
        GIVEN:
            - Request to export documents to directory
        WHEN:
            - Option use_folder_prefix is used
            - Option split manifest is used
        THEN:
            - Documents can be imported again
        """
        shutil.rmtree(Path(self.dirs.media_dir) / "documents")
        shutil.copytree(
            Path(__file__).parent / "samples" / "documents",
            Path(self.dirs.media_dir) / "documents",
        )

        self._do_export(use_folder_prefix=True, split_manifest=True)

        with paperless_environment():
            self.assertEqual(Document.objects.count(), 4)
            Document.objects.all().delete()
            self.assertEqual(Document.objects.count(), 0)
            call_command("document_importer", "--no-progress-bar", self.target)
            self.assertEqual(Document.objects.count(), 4)

    def test_import_db_transaction_failed(self) -> None:
        """
        GIVEN:

@@ -119,15 +119,22 @@ class TestCommandImport(
        # No read permissions
        original_path.chmod(0o222)

        manifest_path = Path(temp_dir) / "manifest.json"
        manifest_path.write_text(
            json.dumps(
                [
                    {
                        "model": "documents.document",
                        EXPORTER_FILE_NAME: "original.pdf",
                        EXPORTER_ARCHIVE_NAME: "archive.pdf",
                    },
                ],
            ),
        )

        cmd = Command()
        cmd.source = Path(temp_dir)
        cmd.manifest = [
            {
                "model": "documents.document",
                EXPORTER_FILE_NAME: "original.pdf",
                EXPORTER_ARCHIVE_NAME: "archive.pdf",
            },
        ]
        cmd.manifest_paths = [manifest_path]
        cmd.data_only = False
        with self.assertRaises(CommandError) as cm:
            cmd.check_manifest_validity()
@@ -296,7 +303,7 @@ class TestCommandImport(
        (self.dirs.scratch_dir / "manifest.json").touch()

        # We're not building a manifest, so it fails, but this test doesn't care
        with self.assertRaises(json.decoder.JSONDecodeError):
        with self.assertRaises(CommandError):
            call_command(
                "document_importer",
                "--no-progress-bar",
@@ -325,7 +332,7 @@ class TestCommandImport(
        )

        # We're not building a manifest, so it fails, but this test doesn't care
        with self.assertRaises(json.decoder.JSONDecodeError):
        with self.assertRaises(CommandError):
            call_command(
                "document_importer",
                "--no-progress-bar",

@@ -48,6 +48,52 @@ class _TestMatchingBase(TestCase):


class TestMatching(_TestMatchingBase):
    def test_matches_uses_latest_version_content_for_root_documents(self) -> None:
        root = Document.objects.create(
            title="root",
            checksum="root",
            mime_type="application/pdf",
            content="root content without token",
        )
        Document.objects.create(
            title="v1",
            checksum="v1",
            mime_type="application/pdf",
            root_document=root,
            content="latest version contains keyword",
        )
        tag = Tag.objects.create(
            name="tag",
            match="keyword",
            matching_algorithm=Tag.MATCH_ANY,
        )

        self.assertTrue(matching.matches(tag, root))

    def test_matches_does_not_fall_back_to_root_content_when_version_exists(
        self,
    ) -> None:
        root = Document.objects.create(
            title="root",
            checksum="root",
            mime_type="application/pdf",
            content="root contains keyword",
        )
        Document.objects.create(
            title="v1",
            checksum="v1",
            mime_type="application/pdf",
            root_document=root,
            content="latest version without token",
        )
        tag = Tag.objects.create(
            name="tag",
            match="keyword",
            matching_algorithm=Tag.MATCH_ANY,
        )

        self.assertFalse(matching.matches(tag, root))

    def test_match_none(self) -> None:
        self._test_matching(
            "",

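For context on MATCH_ANY as used above: a tag matches if any word of its match string occurs in the document content, and the two tests require that this content lookup resolves to the latest version once one exists. A rough illustration of the word test (the real matcher in documents/matching.py is not shown in this diff; case-insensitive word-boundary matching is an assumption here):

    import re

    content = "latest version contains keyword"
    words = "keyword".split()
    # Simplified stand-in for the MATCH_ANY check the tests rely on.
    assert any(
        re.search(rf"\b{re.escape(w)}\b", content, re.IGNORECASE) for w in words
    )
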
@@ -2,7 +2,7 @@ msgid ""
msgstr ""
"Project-Id-Version: paperless-ngx\n"
"Report-Msgid-Bugs-To: \n"
"POT-Creation-Date: 2026-03-04 23:29+0000\n"
"POT-Creation-Date: 2026-03-09 17:44+0000\n"
"PO-Revision-Date: 2022-02-17 04:17\n"
"Last-Translator: \n"
"Language-Team: English\n"
@@ -1299,7 +1299,7 @@ msgstr ""
msgid "workflow runs"
msgstr ""

#: documents/serialisers.py:463 documents/serialisers.py:2332
#: documents/serialisers.py:463 documents/serialisers.py:2482
msgid "Insufficient permissions."
msgstr ""

@@ -1307,39 +1307,39 @@ msgstr ""
msgid "Invalid color."
msgstr ""

#: documents/serialisers.py:1955
#: documents/serialisers.py:2105
#, python-format
msgid "File type %(type)s not supported"
msgstr ""

#: documents/serialisers.py:1999
#: documents/serialisers.py:2149
#, python-format
msgid "Custom field id must be an integer: %(id)s"
msgstr ""

#: documents/serialisers.py:2006
#: documents/serialisers.py:2156
#, python-format
msgid "Custom field with id %(id)s does not exist"
msgstr ""

#: documents/serialisers.py:2023 documents/serialisers.py:2033
#: documents/serialisers.py:2173 documents/serialisers.py:2183
msgid ""
"Custom fields must be a list of integers or an object mapping ids to values."
msgstr ""

#: documents/serialisers.py:2028
#: documents/serialisers.py:2178
msgid "Some custom fields don't exist or were specified twice."
msgstr ""

#: documents/serialisers.py:2175
#: documents/serialisers.py:2325
msgid "Invalid variable detected."
msgstr ""

#: documents/serialisers.py:2388
#: documents/serialisers.py:2538
msgid "Duplicate document identifiers are not allowed."
msgstr ""

#: documents/serialisers.py:2418 documents/views.py:3328
#: documents/serialisers.py:2568 documents/views.py:3328
#, python-format
msgid "Documents not found: %(ids)s"
msgstr ""
@@ -1856,151 +1856,151 @@ msgstr ""
msgid "paperless application settings"
msgstr ""

#: paperless/settings/__init__.py:752
#: paperless/settings/__init__.py:521
msgid "English (US)"
msgstr ""

#: paperless/settings/__init__.py:753
#: paperless/settings/__init__.py:522
msgid "Arabic"
msgstr ""

#: paperless/settings/__init__.py:754
#: paperless/settings/__init__.py:523
msgid "Afrikaans"
msgstr ""

#: paperless/settings/__init__.py:755
#: paperless/settings/__init__.py:524
msgid "Belarusian"
msgstr ""

#: paperless/settings/__init__.py:756
#: paperless/settings/__init__.py:525
msgid "Bulgarian"
msgstr ""

#: paperless/settings/__init__.py:757
#: paperless/settings/__init__.py:526
msgid "Catalan"
msgstr ""

#: paperless/settings/__init__.py:758
#: paperless/settings/__init__.py:527
msgid "Czech"
msgstr ""

#: paperless/settings/__init__.py:759
#: paperless/settings/__init__.py:528
msgid "Danish"
msgstr ""

#: paperless/settings/__init__.py:760
#: paperless/settings/__init__.py:529
msgid "German"
msgstr ""

#: paperless/settings/__init__.py:761
#: paperless/settings/__init__.py:530
msgid "Greek"
msgstr ""

#: paperless/settings/__init__.py:762
#: paperless/settings/__init__.py:531
msgid "English (GB)"
msgstr ""

#: paperless/settings/__init__.py:763
#: paperless/settings/__init__.py:532
msgid "Spanish"
msgstr ""

#: paperless/settings/__init__.py:764
#: paperless/settings/__init__.py:533
msgid "Persian"
msgstr ""

#: paperless/settings/__init__.py:765
#: paperless/settings/__init__.py:534
msgid "Finnish"
msgstr ""

#: paperless/settings/__init__.py:766
#: paperless/settings/__init__.py:535
msgid "French"
msgstr ""

#: paperless/settings/__init__.py:767
#: paperless/settings/__init__.py:536
msgid "Hungarian"
msgstr ""

#: paperless/settings/__init__.py:768
#: paperless/settings/__init__.py:537
msgid "Indonesian"
msgstr ""

#: paperless/settings/__init__.py:769
#: paperless/settings/__init__.py:538
msgid "Italian"
msgstr ""

#: paperless/settings/__init__.py:770
#: paperless/settings/__init__.py:539
msgid "Japanese"
msgstr ""

#: paperless/settings/__init__.py:771
#: paperless/settings/__init__.py:540
msgid "Korean"
msgstr ""

#: paperless/settings/__init__.py:772
#: paperless/settings/__init__.py:541
msgid "Luxembourgish"
msgstr ""

#: paperless/settings/__init__.py:773
#: paperless/settings/__init__.py:542
msgid "Norwegian"
msgstr ""

#: paperless/settings/__init__.py:774
#: paperless/settings/__init__.py:543
msgid "Dutch"
msgstr ""

#: paperless/settings/__init__.py:775
#: paperless/settings/__init__.py:544
msgid "Polish"
msgstr ""

#: paperless/settings/__init__.py:776
#: paperless/settings/__init__.py:545
msgid "Portuguese (Brazil)"
msgstr ""

#: paperless/settings/__init__.py:777
#: paperless/settings/__init__.py:546
msgid "Portuguese"
msgstr ""

#: paperless/settings/__init__.py:778
#: paperless/settings/__init__.py:547
msgid "Romanian"
msgstr ""

#: paperless/settings/__init__.py:779
#: paperless/settings/__init__.py:548
msgid "Russian"
msgstr ""

#: paperless/settings/__init__.py:780
#: paperless/settings/__init__.py:549
msgid "Slovak"
msgstr ""

#: paperless/settings/__init__.py:781
#: paperless/settings/__init__.py:550
msgid "Slovenian"
msgstr ""

#: paperless/settings/__init__.py:782
#: paperless/settings/__init__.py:551
msgid "Serbian"
msgstr ""

#: paperless/settings/__init__.py:783
#: paperless/settings/__init__.py:552
msgid "Swedish"
msgstr ""

#: paperless/settings/__init__.py:784
#: paperless/settings/__init__.py:553
msgid "Turkish"
msgstr ""

#: paperless/settings/__init__.py:785
#: paperless/settings/__init__.py:554
msgid "Ukrainian"
msgstr ""

#: paperless/settings/__init__.py:786
#: paperless/settings/__init__.py:555
msgid "Vietnamese"
msgstr ""

#: paperless/settings/__init__.py:787
#: paperless/settings/__init__.py:556
msgid "Chinese Simplified"
msgstr ""

#: paperless/settings/__init__.py:788
#: paperless/settings/__init__.py:557
msgid "Chinese Traditional"
msgstr ""

@@ -6,18 +6,25 @@ import math
import multiprocessing
import os
import tempfile
from os import PathLike
from pathlib import Path
from typing import Final
from urllib.parse import urlparse

from celery.schedules import crontab
from compression_middleware.middleware import CompressionMiddleware
from dateparser.languages.loader import LocaleDataLoader
from django.utils.translation import gettext_lazy as _
from dotenv import load_dotenv

from paperless.settings.custom import parse_beat_schedule
from paperless.settings.custom import parse_dateparser_languages
from paperless.settings.custom import parse_db_settings
from paperless.settings.custom import parse_hosting_settings
from paperless.settings.custom import parse_ignore_dates
from paperless.settings.custom import parse_redis_url
from paperless.settings.parsers import get_bool_from_env
from paperless.settings.parsers import get_float_from_env
from paperless.settings.parsers import get_int_from_env
from paperless.settings.parsers import get_list_from_env
from paperless.settings.parsers import get_path_from_env

logger = logging.getLogger("paperless.settings")

@@ -45,239 +52,8 @@ for path in [
os.environ["OMP_THREAD_LIMIT"] = "1"


def __get_boolean(key: str, default: str = "NO") -> bool:
    """
    Return a boolean value based on whatever the user has supplied in the
    environment based on whether the value "looks like" it's True or not.
    """
    return bool(os.getenv(key, default).lower() in ("yes", "y", "1", "t", "true"))


def __get_int(key: str, default: int) -> int:
    """
    Return an integer value based on the environment variable or a default
    """
    return int(os.getenv(key, default))


def __get_optional_int(key: str) -> int | None:
    """
    Returns None if the environment key is not present, otherwise an integer
    """
    if key in os.environ:
        return __get_int(key, -1)  # pragma: no cover
    return None


def __get_float(key: str, default: float) -> float:
    """
    Return a float value based on the environment variable or a default
    """
    return float(os.getenv(key, default))


def __get_path(
    key: str,
    default: PathLike | str,
) -> Path:
    """
    Return a normalized, absolute path based on the environment variable or a default,
    if provided
    """
    if key in os.environ:
        return Path(os.environ[key]).resolve()
    return Path(default).resolve()


def __get_optional_path(key: str) -> Path | None:
    """
    Returns None if the environment key is not present, otherwise a fully resolved Path
    """
    if key in os.environ:
        return __get_path(key, "")
    return None


def __get_list(
    key: str,
    default: list[str] | None = None,
    sep: str = ",",
) -> list[str]:
    """
    Return a list of elements from the environment, as separated by the given
    string, or the default if the key does not exist
    """
    if key in os.environ:
        return list(filter(None, os.environ[key].split(sep)))
    elif default is not None:
        return default
    else:
        return []


def _parse_redis_url(env_redis: str | None) -> tuple[str, str]:
    """
    Gets the Redis information from the environment or a default and handles
    converting from incompatible django_channels and celery formats.

    Returns a tuple of (celery_url, channels_url)
    """

    # Not set, return a compatible default
    if env_redis is None:
        return ("redis://localhost:6379", "redis://localhost:6379")

    if "unix" in env_redis.lower():
        # channels_redis socket format, looks like:
        # "unix:///path/to/redis.sock"
        _, path = env_redis.split(":", 1)
        # Optionally setting a db number
        if "?db=" in env_redis:
            path, number = path.split("?db=")
            return (f"redis+socket:{path}?virtual_host={number}", env_redis)
        else:
            return (f"redis+socket:{path}", env_redis)

    elif "+socket" in env_redis.lower():
        # celery socket style, looks like:
        # "redis+socket:///path/to/redis.sock"
        _, path = env_redis.split(":", 1)
        if "?virtual_host=" in env_redis:
            # Virtual host (aka db number)
            path, number = path.split("?virtual_host=")
            return (env_redis, f"unix:{path}?db={number}")
        else:
            return (env_redis, f"unix:{path}")

    # Not a socket
    return (env_redis, env_redis)

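Worked examples of the conversion above, derived directly from the branches of _parse_redis_url (each pair is (celery_url, channels_url)):

    # No environment value: both consumers get the same localhost default.
    assert _parse_redis_url(None) == (
        "redis://localhost:6379",
        "redis://localhost:6379",
    )
    # channels-style unix socket with a db number becomes a celery socket URL:
    assert _parse_redis_url("unix:///run/redis.sock?db=4") == (
        "redis+socket:///run/redis.sock?virtual_host=4",
        "unix:///run/redis.sock?db=4",
    )
    # celery-style socket is translated the other way:
    assert _parse_redis_url("redis+socket:///run/redis.sock?virtual_host=2") == (
        "redis+socket:///run/redis.sock?virtual_host=2",
        "unix:///run/redis.sock?db=2",
    )
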
def _parse_beat_schedule() -> dict:
    """
    Configures the scheduled tasks, according to default or
    environment variables. Task expiration is configured so the task will
    expire (and not run), shortly before the default frequency will put another
    of the same task into the queue


    https://docs.celeryq.dev/en/stable/userguide/periodic-tasks.html#beat-entries
    https://docs.celeryq.dev/en/latest/userguide/calling.html#expiration
    """
    schedule = {}
    tasks = [
        {
            "name": "Check all e-mail accounts",
            "env_key": "PAPERLESS_EMAIL_TASK_CRON",
            # Default every ten minutes
            "env_default": "*/10 * * * *",
            "task": "paperless_mail.tasks.process_mail_accounts",
            "options": {
                # 1 minute before default schedule sends again
                "expires": 9.0 * 60.0,
            },
        },
        {
            "name": "Train the classifier",
            "env_key": "PAPERLESS_TRAIN_TASK_CRON",
            # Default hourly at 5 minutes past the hour
            "env_default": "5 */1 * * *",
            "task": "documents.tasks.train_classifier",
            "options": {
                # 1 minute before default schedule sends again
                "expires": 59.0 * 60.0,
            },
        },
        {
            "name": "Optimize the index",
            "env_key": "PAPERLESS_INDEX_TASK_CRON",
            # Default daily at midnight
            "env_default": "0 0 * * *",
            "task": "documents.tasks.index_optimize",
            "options": {
                # 1 hour before default schedule sends again
                "expires": 23.0 * 60.0 * 60.0,
            },
        },
        {
            "name": "Perform sanity check",
            "env_key": "PAPERLESS_SANITY_TASK_CRON",
            # Default Sunday at 00:30
            "env_default": "30 0 * * sun",
            "task": "documents.tasks.sanity_check",
            "options": {
                # 1 hour before default schedule sends again
                "expires": ((7.0 * 24.0) - 1.0) * 60.0 * 60.0,
            },
        },
        {
            "name": "Empty trash",
            "env_key": "PAPERLESS_EMPTY_TRASH_TASK_CRON",
            # Default daily at 01:00
            "env_default": "0 1 * * *",
            "task": "documents.tasks.empty_trash",
            "options": {
                # 1 hour before default schedule sends again
                "expires": 23.0 * 60.0 * 60.0,
            },
        },
        {
            "name": "Check and run scheduled workflows",
            "env_key": "PAPERLESS_WORKFLOW_SCHEDULED_TASK_CRON",
            # Default hourly at 5 minutes past the hour
            "env_default": "5 */1 * * *",
            "task": "documents.tasks.check_scheduled_workflows",
            "options": {
                # 1 minute before default schedule sends again
                "expires": 59.0 * 60.0,
            },
        },
        {
            "name": "Rebuild LLM index",
            "env_key": "PAPERLESS_LLM_INDEX_TASK_CRON",
            # Default daily at 02:10
            "env_default": "10 2 * * *",
            "task": "documents.tasks.llmindex_index",
            "options": {
                # 1 hour before default schedule sends again
                "expires": 23.0 * 60.0 * 60.0,
            },
        },
        {
            "name": "Cleanup expired share link bundles",
            "env_key": "PAPERLESS_SHARE_LINK_BUNDLE_CLEANUP_CRON",
            # Default daily at 02:00
            "env_default": "0 2 * * *",
            "task": "documents.tasks.cleanup_expired_share_link_bundles",
            "options": {
                # 1 hour before default schedule sends again
                "expires": 23.0 * 60.0 * 60.0,
            },
        },
    ]
    for task in tasks:
        # Either get the environment setting or use the default
        value = os.getenv(task["env_key"], task["env_default"])
        # Don't add disabled tasks to the schedule
        if value == "disable":
            continue
        # I find https://crontab.guru/ super helpful
        # crontab(5) format
        #   - five time-and-date fields
        #   - separated by at least one blank
        minute, hour, day_month, month, day_week = value.split(" ")

        schedule[task["name"]] = {
            "task": task["task"],
            "schedule": crontab(minute, hour, day_week, day_month, month),
            "options": task["options"],
        }

    return schedule

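With the defaults above, the first entry in the returned schedule expands to the following (values read straight from the task list and the loop; the cron string is split into five fields and passed positionally as minute, hour, day_of_week, day_of_month, month):

    from celery.schedules import crontab

    # Derived from the defaults above for the e-mail task.
    example_entry = {
        "Check all e-mail accounts": {
            "task": "paperless_mail.tasks.process_mail_accounts",
            "schedule": crontab("*/10", "*", "*", "*", "*"),
            "options": {"expires": 540.0},  # 9.0 * 60.0
        },
    }
    # Setting PAPERLESS_EMAIL_TASK_CRON="disable" omits the entry entirely.
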
# NEVER RUN WITH DEBUG IN PRODUCTION.
DEBUG = __get_boolean("PAPERLESS_DEBUG", "NO")
DEBUG = get_bool_from_env("PAPERLESS_DEBUG", "NO")


###############################################################################
@@ -286,21 +62,21 @@ DEBUG = __get_boolean("PAPERLESS_DEBUG", "NO")

BASE_DIR: Path = Path(__file__).resolve().parent.parent.parent

STATIC_ROOT = __get_path("PAPERLESS_STATICDIR", BASE_DIR.parent / "static")
STATIC_ROOT = get_path_from_env("PAPERLESS_STATICDIR", BASE_DIR.parent / "static")

MEDIA_ROOT = __get_path("PAPERLESS_MEDIA_ROOT", BASE_DIR.parent / "media")
MEDIA_ROOT = get_path_from_env("PAPERLESS_MEDIA_ROOT", BASE_DIR.parent / "media")
ORIGINALS_DIR = MEDIA_ROOT / "documents" / "originals"
ARCHIVE_DIR = MEDIA_ROOT / "documents" / "archive"
THUMBNAIL_DIR = MEDIA_ROOT / "documents" / "thumbnails"
SHARE_LINK_BUNDLE_DIR = MEDIA_ROOT / "documents" / "share_link_bundles"

DATA_DIR = __get_path("PAPERLESS_DATA_DIR", BASE_DIR.parent / "data")
DATA_DIR = get_path_from_env("PAPERLESS_DATA_DIR", BASE_DIR.parent / "data")

NLTK_DIR = __get_path("PAPERLESS_NLTK_DIR", "/usr/share/nltk_data")
NLTK_DIR = get_path_from_env("PAPERLESS_NLTK_DIR", "/usr/share/nltk_data")

# Check deprecated setting first
EMPTY_TRASH_DIR = (
    __get_path("PAPERLESS_TRASH_DIR", os.getenv("PAPERLESS_EMPTY_TRASH_DIR"))
    get_path_from_env("PAPERLESS_TRASH_DIR", os.getenv("PAPERLESS_EMPTY_TRASH_DIR"))
    if os.getenv("PAPERLESS_TRASH_DIR") or os.getenv("PAPERLESS_EMPTY_TRASH_DIR")
    else None
)
@@ -309,21 +85,21 @@ EMPTY_TRASH_DIR = (
# threads.
MEDIA_LOCK = MEDIA_ROOT / "media.lock"
INDEX_DIR = DATA_DIR / "index"
MODEL_FILE = __get_path(
MODEL_FILE = get_path_from_env(
    "PAPERLESS_MODEL_FILE",
    DATA_DIR / "classification_model.pickle",
)
LLM_INDEX_DIR = DATA_DIR / "llm_index"

LOGGING_DIR = __get_path("PAPERLESS_LOGGING_DIR", DATA_DIR / "log")
LOGGING_DIR = get_path_from_env("PAPERLESS_LOGGING_DIR", DATA_DIR / "log")

CONSUMPTION_DIR = __get_path(
CONSUMPTION_DIR = get_path_from_env(
    "PAPERLESS_CONSUMPTION_DIR",
    BASE_DIR.parent / "consume",
)

# This will be created if it doesn't exist
SCRATCH_DIR = __get_path(
SCRATCH_DIR = get_path_from_env(
    "PAPERLESS_SCRATCH_DIR",
    Path(tempfile.gettempdir()) / "paperless",
)
@@ -332,7 +108,7 @@ SCRATCH_DIR = get_path_from_env(
# Application Definition #
###############################################################################

env_apps = __get_list("PAPERLESS_APPS")
env_apps = get_list_from_env("PAPERLESS_APPS")

INSTALLED_APPS = [
    "whitenoise.runserver_nostatic",
@@ -405,7 +181,7 @@ MIDDLEWARE = [
]

# Optional to enable compression
if __get_boolean("PAPERLESS_ENABLE_COMPRESSION", "yes"):  # pragma: no cover
if get_bool_from_env("PAPERLESS_ENABLE_COMPRESSION", "yes"):  # pragma: no cover
    MIDDLEWARE.insert(0, "compression_middleware.middleware.CompressionMiddleware")

# Workaround to not compress streaming responses (e.g. chat).
@@ -424,20 +200,8 @@ CompressionMiddleware.process_response = patched_process_response
ROOT_URLCONF = "paperless.urls"


def _parse_base_paths() -> tuple[str, str, str, str, str]:
    script_name = os.getenv("PAPERLESS_FORCE_SCRIPT_NAME")
    base_url = (script_name or "") + "/"
    login_url = base_url + "accounts/login/"
    login_redirect_url = base_url + "dashboard"
    logout_redirect_url = os.getenv(
        "PAPERLESS_LOGOUT_REDIRECT_URL",
        login_url + "?loggedout=1",
    )
    return script_name, base_url, login_url, login_redirect_url, logout_redirect_url


FORCE_SCRIPT_NAME, BASE_URL, LOGIN_URL, LOGIN_REDIRECT_URL, LOGOUT_REDIRECT_URL = (
    _parse_base_paths()
    parse_hosting_settings()
)

# DRF Spectacular settings
@@ -471,7 +235,7 @@ STORAGES = {
    "default": {"BACKEND": "django.core.files.storage.FileSystemStorage"},
}

_CELERY_REDIS_URL, _CHANNELS_REDIS_URL = _parse_redis_url(
_CELERY_REDIS_URL, _CHANNELS_REDIS_URL = parse_redis_url(
    os.getenv("PAPERLESS_REDIS", None),
)
_REDIS_KEY_PREFIX = os.getenv("PAPERLESS_REDIS_PREFIX", "")
@@ -520,8 +284,8 @@ EMAIL_PORT: Final[int] = int(os.getenv("PAPERLESS_EMAIL_PORT", 25))
EMAIL_HOST_USER: Final[str] = os.getenv("PAPERLESS_EMAIL_HOST_USER", "")
EMAIL_HOST_PASSWORD: Final[str] = os.getenv("PAPERLESS_EMAIL_HOST_PASSWORD", "")
DEFAULT_FROM_EMAIL: Final[str] = os.getenv("PAPERLESS_EMAIL_FROM", EMAIL_HOST_USER)
EMAIL_USE_TLS: Final[bool] = __get_boolean("PAPERLESS_EMAIL_USE_TLS")
EMAIL_USE_SSL: Final[bool] = __get_boolean("PAPERLESS_EMAIL_USE_SSL")
EMAIL_USE_TLS: Final[bool] = get_bool_from_env("PAPERLESS_EMAIL_USE_TLS")
EMAIL_USE_SSL: Final[bool] = get_bool_from_env("PAPERLESS_EMAIL_USE_SSL")
EMAIL_SUBJECT_PREFIX: Final[str] = "[Paperless-ngx] "
EMAIL_TIMEOUT = 30.0
EMAIL_ENABLED = EMAIL_HOST != "localhost" or EMAIL_HOST_USER != ""
@@ -546,20 +310,22 @@ ACCOUNT_DEFAULT_HTTP_PROTOCOL = os.getenv(
)

ACCOUNT_ADAPTER = "paperless.adapter.CustomAccountAdapter"
ACCOUNT_ALLOW_SIGNUPS = __get_boolean("PAPERLESS_ACCOUNT_ALLOW_SIGNUPS")
ACCOUNT_DEFAULT_GROUPS = __get_list("PAPERLESS_ACCOUNT_DEFAULT_GROUPS")
ACCOUNT_ALLOW_SIGNUPS = get_bool_from_env("PAPERLESS_ACCOUNT_ALLOW_SIGNUPS")
ACCOUNT_DEFAULT_GROUPS = get_list_from_env("PAPERLESS_ACCOUNT_DEFAULT_GROUPS")

SOCIALACCOUNT_ADAPTER = "paperless.adapter.CustomSocialAccountAdapter"
SOCIALACCOUNT_ALLOW_SIGNUPS = __get_boolean(
SOCIALACCOUNT_ALLOW_SIGNUPS = get_bool_from_env(
    "PAPERLESS_SOCIALACCOUNT_ALLOW_SIGNUPS",
    "yes",
)
SOCIALACCOUNT_AUTO_SIGNUP = __get_boolean("PAPERLESS_SOCIAL_AUTO_SIGNUP")
SOCIALACCOUNT_AUTO_SIGNUP = get_bool_from_env("PAPERLESS_SOCIAL_AUTO_SIGNUP")
SOCIALACCOUNT_PROVIDERS = json.loads(
    os.getenv("PAPERLESS_SOCIALACCOUNT_PROVIDERS", "{}"),
)
SOCIAL_ACCOUNT_DEFAULT_GROUPS = __get_list("PAPERLESS_SOCIAL_ACCOUNT_DEFAULT_GROUPS")
SOCIAL_ACCOUNT_SYNC_GROUPS = __get_boolean("PAPERLESS_SOCIAL_ACCOUNT_SYNC_GROUPS")
SOCIAL_ACCOUNT_DEFAULT_GROUPS = get_list_from_env(
    "PAPERLESS_SOCIAL_ACCOUNT_DEFAULT_GROUPS",
)
SOCIAL_ACCOUNT_SYNC_GROUPS = get_bool_from_env("PAPERLESS_SOCIAL_ACCOUNT_SYNC_GROUPS")
SOCIAL_ACCOUNT_SYNC_GROUPS_CLAIM: Final[str] = os.getenv(
    "PAPERLESS_SOCIAL_ACCOUNT_SYNC_GROUPS_CLAIM",
    "groups",
@@ -571,8 +337,8 @@ MFA_TOTP_ISSUER = "Paperless-ngx"

ACCOUNT_EMAIL_SUBJECT_PREFIX = "[Paperless-ngx] "

DISABLE_REGULAR_LOGIN = __get_boolean("PAPERLESS_DISABLE_REGULAR_LOGIN")
REDIRECT_LOGIN_TO_SSO = __get_boolean("PAPERLESS_REDIRECT_LOGIN_TO_SSO")
DISABLE_REGULAR_LOGIN = get_bool_from_env("PAPERLESS_DISABLE_REGULAR_LOGIN")
REDIRECT_LOGIN_TO_SSO = get_bool_from_env("PAPERLESS_REDIRECT_LOGIN_TO_SSO")

AUTO_LOGIN_USERNAME = os.getenv("PAPERLESS_AUTO_LOGIN_USERNAME")

@@ -585,12 +351,15 @@ ACCOUNT_EMAIL_VERIFICATION = (
    )
)

ACCOUNT_EMAIL_UNKNOWN_ACCOUNTS = __get_boolean(
ACCOUNT_EMAIL_UNKNOWN_ACCOUNTS = get_bool_from_env(
    "PAPERLESS_ACCOUNT_EMAIL_UNKNOWN_ACCOUNTS",
    "True",
)

ACCOUNT_SESSION_REMEMBER = __get_boolean("PAPERLESS_ACCOUNT_SESSION_REMEMBER", "True")
ACCOUNT_SESSION_REMEMBER = get_bool_from_env(
    "PAPERLESS_ACCOUNT_SESSION_REMEMBER",
    "True",
)
SESSION_EXPIRE_AT_BROWSER_CLOSE = not ACCOUNT_SESSION_REMEMBER
SESSION_COOKIE_AGE = int(
    os.getenv("PAPERLESS_SESSION_COOKIE_AGE", 60 * 60 * 24 * 7 * 3),
@@ -607,8 +376,8 @@ if AUTO_LOGIN_USERNAME:

def _parse_remote_user_settings() -> str:
    global MIDDLEWARE, AUTHENTICATION_BACKENDS, REST_FRAMEWORK
    enable = __get_boolean("PAPERLESS_ENABLE_HTTP_REMOTE_USER")
    enable_api = __get_boolean("PAPERLESS_ENABLE_HTTP_REMOTE_USER_API")
    enable = get_bool_from_env("PAPERLESS_ENABLE_HTTP_REMOTE_USER")
    enable_api = get_bool_from_env("PAPERLESS_ENABLE_HTTP_REMOTE_USER_API")
    if enable or enable_api:
        MIDDLEWARE.append("paperless.auth.HttpRemoteUserMiddleware")
        AUTHENTICATION_BACKENDS.insert(
@@ -636,16 +405,16 @@ HTTP_REMOTE_USER_HEADER_NAME = _parse_remote_user_settings()
X_FRAME_OPTIONS = "SAMEORIGIN"

# The next 3 settings can also be set using just PAPERLESS_URL
CSRF_TRUSTED_ORIGINS = __get_list("PAPERLESS_CSRF_TRUSTED_ORIGINS")
CSRF_TRUSTED_ORIGINS = get_list_from_env("PAPERLESS_CSRF_TRUSTED_ORIGINS")

if DEBUG:
    # Allow access from the angular development server during debugging
    CSRF_TRUSTED_ORIGINS.append("http://localhost:4200")

# We allow CORS from localhost:8000
CORS_ALLOWED_ORIGINS = __get_list(
CORS_ALLOWED_ORIGINS = get_list_from_env(
    "PAPERLESS_CORS_ALLOWED_HOSTS",
    ["http://localhost:8000"],
    default=["http://localhost:8000"],
)

if DEBUG:
@@ -658,7 +427,7 @@ CORS_EXPOSE_HEADERS = [
    "Content-Disposition",
]

ALLOWED_HOSTS = __get_list("PAPERLESS_ALLOWED_HOSTS", ["*"])
ALLOWED_HOSTS = get_list_from_env("PAPERLESS_ALLOWED_HOSTS", default=["*"])
if ALLOWED_HOSTS != ["*"]:
    # always allow localhost. Necessary e.g. for healthcheck in docker.
    ALLOWED_HOSTS.append("localhost")
@@ -678,10 +447,10 @@ def _parse_paperless_url():
PAPERLESS_URL = _parse_paperless_url()

# For use with trusted proxies
TRUSTED_PROXIES = __get_list("PAPERLESS_TRUSTED_PROXIES")
TRUSTED_PROXIES = get_list_from_env("PAPERLESS_TRUSTED_PROXIES")

USE_X_FORWARDED_HOST = __get_boolean("PAPERLESS_USE_X_FORWARD_HOST", "false")
USE_X_FORWARDED_PORT = __get_boolean("PAPERLESS_USE_X_FORWARD_PORT", "false")
USE_X_FORWARDED_HOST = get_bool_from_env("PAPERLESS_USE_X_FORWARD_HOST", "false")
USE_X_FORWARDED_PORT = get_bool_from_env("PAPERLESS_USE_X_FORWARD_PORT", "false")
SECURE_PROXY_SSL_HEADER = (
    tuple(json.loads(os.environ["PAPERLESS_PROXY_SSL_HEADER"]))
    if "PAPERLESS_PROXY_SSL_HEADER" in os.environ
@@ -724,7 +493,7 @@ CSRF_COOKIE_NAME = f"{COOKIE_PREFIX}csrftoken"
SESSION_COOKIE_NAME = f"{COOKIE_PREFIX}sessionid"
LANGUAGE_COOKIE_NAME = f"{COOKIE_PREFIX}django_language"

EMAIL_CERTIFICATE_FILE = __get_optional_path("PAPERLESS_EMAIL_CERTIFICATE_LOCATION")
EMAIL_CERTIFICATE_FILE = get_path_from_env("PAPERLESS_EMAIL_CERTIFICATE_LOCATION")


###############################################################################
@@ -875,7 +644,7 @@ CELERY_BROKER_URL = _CELERY_REDIS_URL
CELERY_TIMEZONE = TIME_ZONE

CELERY_WORKER_HIJACK_ROOT_LOGGER = False
CELERY_WORKER_CONCURRENCY: Final[int] = __get_int("PAPERLESS_TASK_WORKERS", 1)
CELERY_WORKER_CONCURRENCY: Final[int] = get_int_from_env("PAPERLESS_TASK_WORKERS", 1)
TASK_WORKERS = CELERY_WORKER_CONCURRENCY
CELERY_WORKER_MAX_TASKS_PER_CHILD = 1
CELERY_WORKER_SEND_TASK_EVENTS = True
@@ -888,7 +657,7 @@ CELERY_BROKER_TRANSPORT_OPTIONS = {
}

CELERY_TASK_TRACK_STARTED = True
CELERY_TASK_TIME_LIMIT: Final[int] = __get_int("PAPERLESS_WORKER_TIMEOUT", 1800)
CELERY_TASK_TIME_LIMIT: Final[int] = get_int_from_env("PAPERLESS_WORKER_TIMEOUT", 1800)

CELERY_RESULT_EXTENDED = True
CELERY_RESULT_BACKEND = "django-db"
@@ -900,7 +669,7 @@ CELERY_TASK_SERIALIZER = "pickle"
CELERY_ACCEPT_CONTENT = ["application/json", "application/x-python-serialize"]

# https://docs.celeryq.dev/en/stable/userguide/configuration.html#beat-schedule
CELERY_BEAT_SCHEDULE = _parse_beat_schedule()
CELERY_BEAT_SCHEDULE = parse_beat_schedule()

# https://docs.celeryq.dev/en/stable/userguide/configuration.html#beat-schedule-filename
CELERY_BEAT_SCHEDULE_FILENAME = str(DATA_DIR / "celerybeat-schedule.db")
@@ -908,14 +677,14 @@ CELERY_BEAT_SCHEDULE_FILENAME = str(DATA_DIR / "celerybeat-schedule.db")

# Cachalot: Database read cache.
def _parse_cachalot_settings():
    ttl = __get_int("PAPERLESS_READ_CACHE_TTL", 3600)
    ttl = get_int_from_env("PAPERLESS_READ_CACHE_TTL", 3600)
    ttl = min(ttl, 31536000) if ttl > 0 else 3600
    _, redis_url = _parse_redis_url(
    _, redis_url = parse_redis_url(
        os.getenv("PAPERLESS_READ_CACHE_REDIS_URL", _CHANNELS_REDIS_URL),
    )
    result = {
        "CACHALOT_CACHE": "read-cache",
        "CACHALOT_ENABLED": __get_boolean(
        "CACHALOT_ENABLED": get_bool_from_env(
            "PAPERLESS_DB_READ_CACHE_ENABLED",
            default="no",
        ),
@@ -1000,9 +769,9 @@ CONSUMER_POLLING_INTERVAL = float(os.getenv("PAPERLESS_CONSUMER_POLLING_INTERVAL

CONSUMER_STABILITY_DELAY = float(os.getenv("PAPERLESS_CONSUMER_STABILITY_DELAY", 5))

CONSUMER_DELETE_DUPLICATES = __get_boolean("PAPERLESS_CONSUMER_DELETE_DUPLICATES")
CONSUMER_DELETE_DUPLICATES = get_bool_from_env("PAPERLESS_CONSUMER_DELETE_DUPLICATES")

CONSUMER_RECURSIVE = __get_boolean("PAPERLESS_CONSUMER_RECURSIVE")
CONSUMER_RECURSIVE = get_bool_from_env("PAPERLESS_CONSUMER_RECURSIVE")

# Ignore regex patterns, matched against filename only
CONSUMER_IGNORE_PATTERNS = list(
@@ -1024,13 +793,13 @@ CONSUMER_IGNORE_DIRS = list(
    ),
)

CONSUMER_SUBDIRS_AS_TAGS = __get_boolean("PAPERLESS_CONSUMER_SUBDIRS_AS_TAGS")
CONSUMER_SUBDIRS_AS_TAGS = get_bool_from_env("PAPERLESS_CONSUMER_SUBDIRS_AS_TAGS")

CONSUMER_ENABLE_BARCODES: Final[bool] = __get_boolean(
CONSUMER_ENABLE_BARCODES: Final[bool] = get_bool_from_env(
    "PAPERLESS_CONSUMER_ENABLE_BARCODES",
)

CONSUMER_BARCODE_TIFF_SUPPORT: Final[bool] = __get_boolean(
CONSUMER_BARCODE_TIFF_SUPPORT: Final[bool] = get_bool_from_env(
    "PAPERLESS_CONSUMER_BARCODE_TIFF_SUPPORT",
)

@@ -1039,7 +808,7 @@ CONSUMER_BARCODE_STRING: Final[str] = os.getenv(
    "PATCHT",
)

CONSUMER_ENABLE_ASN_BARCODE: Final[bool] = __get_boolean(
CONSUMER_ENABLE_ASN_BARCODE: Final[bool] = get_bool_from_env(
    "PAPERLESS_CONSUMER_ENABLE_ASN_BARCODE",
)

@@ -1048,23 +817,26 @@ CONSUMER_ASN_BARCODE_PREFIX: Final[str] = os.getenv(
    "ASN",
)

CONSUMER_BARCODE_UPSCALE: Final[float] = __get_float(
CONSUMER_BARCODE_UPSCALE: Final[float] = get_float_from_env(
    "PAPERLESS_CONSUMER_BARCODE_UPSCALE",
    0.0,
)

CONSUMER_BARCODE_DPI: Final[int] = __get_int("PAPERLESS_CONSUMER_BARCODE_DPI", 300)
CONSUMER_BARCODE_DPI: Final[int] = get_int_from_env(
    "PAPERLESS_CONSUMER_BARCODE_DPI",
    300,
)

CONSUMER_BARCODE_MAX_PAGES: Final[int] = __get_int(
CONSUMER_BARCODE_MAX_PAGES: Final[int] = get_int_from_env(
    "PAPERLESS_CONSUMER_BARCODE_MAX_PAGES",
    0,
)

CONSUMER_BARCODE_RETAIN_SPLIT_PAGES = __get_boolean(
CONSUMER_BARCODE_RETAIN_SPLIT_PAGES = get_bool_from_env(
    "PAPERLESS_CONSUMER_BARCODE_RETAIN_SPLIT_PAGES",
)

CONSUMER_ENABLE_TAG_BARCODE: Final[bool] = __get_boolean(
CONSUMER_ENABLE_TAG_BARCODE: Final[bool] = get_bool_from_env(
    "PAPERLESS_CONSUMER_ENABLE_TAG_BARCODE",
)

@@ -1077,11 +849,11 @@ CONSUMER_TAG_BARCODE_MAPPING = dict(
    ),
)

CONSUMER_TAG_BARCODE_SPLIT: Final[bool] = __get_boolean(
CONSUMER_TAG_BARCODE_SPLIT: Final[bool] = get_bool_from_env(
    "PAPERLESS_CONSUMER_TAG_BARCODE_SPLIT",
)

CONSUMER_ENABLE_COLLATE_DOUBLE_SIDED: Final[bool] = __get_boolean(
CONSUMER_ENABLE_COLLATE_DOUBLE_SIDED: Final[bool] = get_bool_from_env(
    "PAPERLESS_CONSUMER_ENABLE_COLLATE_DOUBLE_SIDED",
)

@@ -1090,13 +862,13 @@ CONSUMER_COLLATE_DOUBLE_SIDED_SUBDIR_NAME: Final[str] = os.getenv(
    "double-sided",
)

CONSUMER_COLLATE_DOUBLE_SIDED_TIFF_SUPPORT: Final[bool] = __get_boolean(
CONSUMER_COLLATE_DOUBLE_SIDED_TIFF_SUPPORT: Final[bool] = get_bool_from_env(
    "PAPERLESS_CONSUMER_COLLATE_DOUBLE_SIDED_TIFF_SUPPORT",
)

CONSUMER_PDF_RECOVERABLE_MIME_TYPES = ("application/octet-stream",)

OCR_PAGES = __get_optional_int("PAPERLESS_OCR_PAGES")
OCR_PAGES = get_int_from_env("PAPERLESS_OCR_PAGES")

# The default language that tesseract will attempt to use when parsing
# documents. It should be a 3-letter language code consistent with ISO 639.
@@ -1110,20 +882,20 @@ OCR_MODE = os.getenv("PAPERLESS_OCR_MODE", "skip")

OCR_SKIP_ARCHIVE_FILE = os.getenv("PAPERLESS_OCR_SKIP_ARCHIVE_FILE", "never")

OCR_IMAGE_DPI = __get_optional_int("PAPERLESS_OCR_IMAGE_DPI")
OCR_IMAGE_DPI = get_int_from_env("PAPERLESS_OCR_IMAGE_DPI")

OCR_CLEAN = os.getenv("PAPERLESS_OCR_CLEAN", "clean")

OCR_DESKEW: Final[bool] = __get_boolean("PAPERLESS_OCR_DESKEW", "true")
OCR_DESKEW: Final[bool] = get_bool_from_env("PAPERLESS_OCR_DESKEW", "true")

OCR_ROTATE_PAGES: Final[bool] = __get_boolean("PAPERLESS_OCR_ROTATE_PAGES", "true")
OCR_ROTATE_PAGES: Final[bool] = get_bool_from_env("PAPERLESS_OCR_ROTATE_PAGES", "true")

OCR_ROTATE_PAGES_THRESHOLD: Final[float] = __get_float(
OCR_ROTATE_PAGES_THRESHOLD: Final[float] = get_float_from_env(
    "PAPERLESS_OCR_ROTATE_PAGES_THRESHOLD",
    12.0,
)

OCR_MAX_IMAGE_PIXELS: Final[int | None] = __get_optional_int(
OCR_MAX_IMAGE_PIXELS: Final[int | None] = get_int_from_env(
    "PAPERLESS_OCR_MAX_IMAGE_PIXELS",
)

@@ -1134,7 +906,7 @@ OCR_COLOR_CONVERSION_STRATEGY = os.getenv(

OCR_USER_ARGS = os.getenv("PAPERLESS_OCR_USER_ARGS")

MAX_IMAGE_PIXELS: Final[int | None] = __get_optional_int(
MAX_IMAGE_PIXELS: Final[int | None] = get_int_from_env(
    "PAPERLESS_MAX_IMAGE_PIXELS",
)

@@ -1149,7 +921,7 @@ CONVERT_MEMORY_LIMIT = os.getenv("PAPERLESS_CONVERT_MEMORY_LIMIT")
GS_BINARY = os.getenv("PAPERLESS_GS_BINARY", "gs")

# Fallback layout for .eml consumption
EMAIL_PARSE_DEFAULT_LAYOUT = __get_int(
EMAIL_PARSE_DEFAULT_LAYOUT = get_int_from_env(
    "PAPERLESS_EMAIL_PARSE_DEFAULT_LAYOUT",
    1,  # MailRule.PdfLayout.TEXT_HTML but that can't be imported here
)
@@ -1163,23 +935,9 @@ DATE_ORDER = os.getenv("PAPERLESS_DATE_ORDER", "DMY")
FILENAME_DATE_ORDER = os.getenv("PAPERLESS_FILENAME_DATE_ORDER")


def _parse_dateparser_languages(languages: str | None):
    language_list = languages.split("+") if languages else []
    # There is an unfixed issue in zh-Hant and zh-Hans locales in the dateparser lib.
    # See: https://github.com/scrapinghub/dateparser/issues/875
    for index, language in enumerate(language_list):
        if language.startswith("zh-") and "zh" not in language_list:
            logger.warning(
                f'Chinese locale detected: {language}. dateparser might fail to parse some dates with this locale, so Chinese ("zh") will be used as a fallback.',
            )
            language_list.append("zh")

    return list(LocaleDataLoader().get_locale_map(locales=language_list))

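Behavior of the zh fallback above, illustrated; the exact return value depends on dateparser's LocaleDataLoader, so treat this only as an indication of shape:

    # Illustrative: "zh-Hant" triggers the warning and appends plain "zh",
    # so the locale map is built from ["en", "zh-Hant", "zh"].
    locales = _parse_dateparser_languages("en+zh-Hant")
    assert "zh" in locales  # the fallback locale is included
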
# If not set, we will infer it at runtime
DATE_PARSER_LANGUAGES = (
    _parse_dateparser_languages(
    parse_dateparser_languages(
        os.getenv("PAPERLESS_DATE_PARSER_LANGUAGES"),
    )
    if os.getenv("PAPERLESS_DATE_PARSER_LANGUAGES")
@@ -1190,7 +948,7 @@ DATE_PARSER_LANGUAGES = (
# Maximum number of dates taken from document start to end to show as suggestions for
# `created` date in the frontend. Duplicates are removed, which can result in
# fewer dates shown.
NUMBER_OF_SUGGESTED_DATES = __get_int("PAPERLESS_NUMBER_OF_SUGGESTED_DATES", 3)
NUMBER_OF_SUGGESTED_DATES = get_int_from_env("PAPERLESS_NUMBER_OF_SUGGESTED_DATES", 3)

# Specify the filename format for out files
FILENAME_FORMAT = os.getenv("PAPERLESS_FILENAME_FORMAT")
@@ -1198,7 +956,7 @@ FILENAME_FORMAT = os.getenv("PAPERLESS_FILENAME_FORMAT")
# If this is enabled, variables in filename format will resolve to
# empty-string instead of 'none'.
# Directories with 'empty names' are omitted, too.
FILENAME_FORMAT_REMOVE_NONE = __get_boolean(
FILENAME_FORMAT_REMOVE_NONE = get_bool_from_env(
    "PAPERLESS_FILENAME_FORMAT_REMOVE_NONE",
    "NO",
)
@@ -1209,7 +967,7 @@ THUMBNAIL_FONT_NAME = os.getenv(
)

# Tika settings
TIKA_ENABLED = __get_boolean("PAPERLESS_TIKA_ENABLED", "NO")
TIKA_ENABLED = get_bool_from_env("PAPERLESS_TIKA_ENABLED", "NO")
TIKA_ENDPOINT = os.getenv("PAPERLESS_TIKA_ENDPOINT", "http://localhost:9998")
TIKA_GOTENBERG_ENDPOINT = os.getenv(
    "PAPERLESS_TIKA_GOTENBERG_ENDPOINT",
@@ -1219,52 +977,21 @@ TIKA_GOTENBERG_ENDPOINT = os.getenv(
if TIKA_ENABLED:
    INSTALLED_APPS.append("paperless_tika.apps.PaperlessTikaConfig")

AUDIT_LOG_ENABLED = __get_boolean("PAPERLESS_AUDIT_LOG_ENABLED", "true")
AUDIT_LOG_ENABLED = get_bool_from_env("PAPERLESS_AUDIT_LOG_ENABLED", "true")
if AUDIT_LOG_ENABLED:
    INSTALLED_APPS.append("auditlog")
    MIDDLEWARE.append("auditlog.middleware.AuditlogMiddleware")


def _parse_ignore_dates(
    env_ignore: str,
    date_order: str = DATE_ORDER,
) -> set[datetime.datetime]:
    """
    If the PAPERLESS_IGNORE_DATES environment variable is set, parse the
    user provided string(s) into dates

    Args:
        env_ignore (str): The value of the environment variable, comma separated dates
        date_order (str, optional): The format of the date strings.
            Defaults to DATE_ORDER.

    Returns:
        Set[datetime.datetime]: The set of parsed date objects
    """
    import dateparser

    ignored_dates = set()
    for s in env_ignore.split(","):
        d = dateparser.parse(
            s,
            settings={
                "DATE_ORDER": date_order,
            },
        )
        if d:
            ignored_dates.add(d.date())
    return ignored_dates

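A worked call of the parser above. Note the return annotation says set[datetime.datetime], but d.date() is what gets added, so the set actually holds datetime.date objects, which matches the IGNORE_DATES annotation below.

    import datetime

    # With "DMY" order, both forms should parse to the same kind of value;
    # ISO dates are typically accepted by dateparser regardless of order.
    ignored = _parse_ignore_dates("2024-01-01,13.04.2024", date_order="DMY")
    assert datetime.date(2024, 1, 1) in ignored
    assert datetime.date(2024, 4, 13) in ignored
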
# List dates that should be ignored when trying to parse date from document text
|
||||
IGNORE_DATES: set[datetime.date] = set()
|
||||
|
||||
if os.getenv("PAPERLESS_IGNORE_DATES") is not None:
|
||||
IGNORE_DATES = _parse_ignore_dates(os.getenv("PAPERLESS_IGNORE_DATES"))
|
||||
IGNORE_DATES = parse_ignore_dates(os.getenv("PAPERLESS_IGNORE_DATES"), DATE_ORDER)
|
||||
|
||||
ENABLE_UPDATE_CHECK = os.getenv("PAPERLESS_ENABLE_UPDATE_CHECK", "default")
|
||||
if ENABLE_UPDATE_CHECK != "default":
|
||||
ENABLE_UPDATE_CHECK = __get_boolean("PAPERLESS_ENABLE_UPDATE_CHECK")
|
||||
ENABLE_UPDATE_CHECK = get_bool_from_env("PAPERLESS_ENABLE_UPDATE_CHECK")
|
||||
|
||||
APP_TITLE = os.getenv("PAPERLESS_APP_TITLE", None)
|
||||
APP_LOGO = os.getenv("PAPERLESS_APP_LOGO", None)
|
||||
@@ -1309,7 +1036,7 @@ def _get_nltk_language_setting(ocr_lang: str) -> str | None:
|
||||
return iso_code_to_nltk.get(ocr_lang)
|
||||
|
||||
|
||||
NLTK_ENABLED: Final[bool] = __get_boolean("PAPERLESS_ENABLE_NLTK", "yes")
|
||||
NLTK_ENABLED: Final[bool] = get_bool_from_env("PAPERLESS_ENABLE_NLTK", "yes")
|
||||
|
||||
NLTK_LANGUAGE: str | None = _get_nltk_language_setting(OCR_LANGUAGE)
|
||||
|
||||
@@ -1318,7 +1045,7 @@ NLTK_LANGUAGE: str | None = _get_nltk_language_setting(OCR_LANGUAGE)
|
||||
###############################################################################
|
||||
|
||||
EMAIL_GNUPG_HOME: Final[str | None] = os.getenv("PAPERLESS_EMAIL_GNUPG_HOME")
|
||||
EMAIL_ENABLE_GPG_DECRYPTOR: Final[bool] = __get_boolean(
|
||||
EMAIL_ENABLE_GPG_DECRYPTOR: Final[bool] = get_bool_from_env(
|
||||
"PAPERLESS_ENABLE_GPG_DECRYPTOR",
|
||||
)
|
||||
|
||||
@@ -1326,7 +1053,7 @@ EMAIL_ENABLE_GPG_DECRYPTOR: Final[bool] = __get_boolean(
|
||||
###############################################################################
|
||||
# Soft Delete #
|
||||
###############################################################################
|
||||
EMPTY_TRASH_DELAY = max(__get_int("PAPERLESS_EMPTY_TRASH_DELAY", 30), 1)
|
||||
EMPTY_TRASH_DELAY = max(get_int_from_env("PAPERLESS_EMPTY_TRASH_DELAY", 30), 1)
|
||||
|
||||
|
||||
###############################################################################
|
||||
@@ -1351,21 +1078,17 @@ OUTLOOK_OAUTH_ENABLED = bool(
|
||||
###############################################################################
|
||||
# Webhooks
|
||||
###############################################################################
|
||||
WEBHOOKS_ALLOWED_SCHEMES = set(
|
||||
WEBHOOKS_ALLOWED_SCHEMES = {
|
||||
s.lower()
|
||||
for s in __get_list(
|
||||
for s in get_list_from_env(
|
||||
"PAPERLESS_WEBHOOKS_ALLOWED_SCHEMES",
|
||||
["http", "https"],
|
||||
default=["http", "https"],
|
||||
)
|
||||
)
|
||||
WEBHOOKS_ALLOWED_PORTS = set(
|
||||
int(p)
|
||||
for p in __get_list(
|
||||
"PAPERLESS_WEBHOOKS_ALLOWED_PORTS",
|
||||
[],
|
||||
)
|
||||
)
|
||||
WEBHOOKS_ALLOW_INTERNAL_REQUESTS = __get_boolean(
|
||||
}
|
||||
WEBHOOKS_ALLOWED_PORTS = {
|
||||
int(p) for p in get_list_from_env("PAPERLESS_WEBHOOKS_ALLOWED_PORTS", default=[])
|
||||
}
|
||||
WEBHOOKS_ALLOW_INTERNAL_REQUESTS = get_bool_from_env(
|
||||
"PAPERLESS_WEBHOOKS_ALLOW_INTERNAL_REQUESTS",
|
||||
"true",
|
||||
)
|
||||
@@ -1380,7 +1103,7 @@ REMOTE_OCR_ENDPOINT = os.getenv("PAPERLESS_REMOTE_OCR_ENDPOINT")
|
||||
################################################################################
|
||||
# AI Settings #
|
||||
################################################################################
|
||||
AI_ENABLED = __get_boolean("PAPERLESS_AI_ENABLED", "NO")
|
||||
AI_ENABLED = get_bool_from_env("PAPERLESS_AI_ENABLED", "NO")
|
||||
LLM_EMBEDDING_BACKEND = os.getenv(
|
||||
"PAPERLESS_AI_LLM_EMBEDDING_BACKEND",
|
||||
) # "huggingface" or "openai"
|
||||
|
||||
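The hunks above mechanically swap the old module-private helpers (`__get_boolean`, `__get_int`, `__get_list`) for the shared functions in `paperless.settings.parsers`. A minimal sketch of the renamed helpers in use; the environment values here are made up purely for illustration:

import os

from paperless.settings.parsers import get_bool_from_env
from paperless.settings.parsers import get_list_from_env

os.environ["PAPERLESS_AI_ENABLED"] = "yes"
os.environ["PAPERLESS_WEBHOOKS_ALLOWED_SCHEMES"] = "HTTPS"

assert get_bool_from_env("PAPERLESS_AI_ENABLED", "NO") is True
# Lower-casing stays at the call site, as in WEBHOOKS_ALLOWED_SCHEMES above
schemes = {
    s.lower()
    for s in get_list_from_env("PAPERLESS_WEBHOOKS_ALLOWED_SCHEMES", default=["http", "https"])
}
assert schemes == {"https"}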
@@ -1,11 +1,191 @@
import datetime
import logging
import os
from pathlib import Path
from typing import Any

from celery.schedules import crontab
from dateparser.languages.loader import LocaleDataLoader

from paperless.settings.parsers import get_choice_from_env
from paperless.settings.parsers import get_int_from_env
from paperless.settings.parsers import parse_dict_from_str

logger = logging.getLogger(__name__)


def parse_hosting_settings() -> tuple[str | None, str, str, str, str]:
    script_name = os.getenv("PAPERLESS_FORCE_SCRIPT_NAME")
    base_url = (script_name or "") + "/"
    login_url = base_url + "accounts/login/"
    login_redirect_url = base_url + "dashboard"
    logout_redirect_url = os.getenv(
        "PAPERLESS_LOGOUT_REDIRECT_URL",
        login_url + "?loggedout=1",
    )
    return script_name, base_url, login_url, login_redirect_url, logout_redirect_url
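A short usage sketch for `parse_hosting_settings()`; the expected tuple is taken from the parametrized test cases later in this diff:

import os

os.environ["PAPERLESS_FORCE_SCRIPT_NAME"] = "/paperless"
assert parse_hosting_settings() == (
    "/paperless",
    "/paperless/",
    "/paperless/accounts/login/",
    "/paperless/dashboard",
    "/paperless/accounts/login/?loggedout=1",
)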
def parse_redis_url(env_redis: str | None) -> tuple[str, str]:
    """
    Gets the Redis information from the environment or a default and handles
    converting from incompatible django_channels and celery formats.

    Returns a tuple of (celery_url, channels_url)
    """
    # Not set, return a compatible default
    if env_redis is None:
        return ("redis://localhost:6379", "redis://localhost:6379")

    if "unix" in env_redis.lower():
        # channels_redis socket format, looks like:
        # "unix:///path/to/redis.sock"
        _, path = env_redis.split(":", maxsplit=1)
        # Optionally setting a db number
        if "?db=" in env_redis:
            path, number = path.split("?db=")
            return (f"redis+socket:{path}?virtual_host={number}", env_redis)
        else:
            return (f"redis+socket:{path}", env_redis)

    elif "+socket" in env_redis.lower():
        # celery socket style, looks like:
        # "redis+socket:///path/to/redis.sock"
        _, path = env_redis.split(":", maxsplit=1)
        if "?virtual_host=" in env_redis:
            # Virtual host (aka db number)
            path, number = path.split("?virtual_host=")
            return (env_redis, f"unix:{path}?db={number}")
        else:
            return (env_redis, f"unix:{path}")

    # Not a socket
    return (env_redis, env_redis)
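The conversion is symmetric: whichever socket style comes in, the tuple carries a celery-compatible URL first and a channels-compatible URL second. These pairs mirror the `TestRedisSocketConversion` cases below:

assert parse_redis_url("unix:///run/redis/redis.sock?db=10") == (
    "redis+socket:///run/redis/redis.sock?virtual_host=10",
    "unix:///run/redis/redis.sock?db=10",
)
# Non-socket URLs pass through untouched
assert parse_redis_url("redis://myredishost:6379") == (
    "redis://myredishost:6379",
    "redis://myredishost:6379",
)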
def parse_beat_schedule() -> dict:
    """
    Configures the scheduled tasks, according to default or
    environment variables. Task expiration is configured so the task will
    expire (and not run) shortly before the default frequency puts another
    of the same task into the queue

    https://docs.celeryq.dev/en/stable/userguide/periodic-tasks.html#beat-entries
    https://docs.celeryq.dev/en/latest/userguide/calling.html#expiration
    """
    schedule = {}
    tasks = [
        {
            "name": "Check all e-mail accounts",
            "env_key": "PAPERLESS_EMAIL_TASK_CRON",
            # Default every ten minutes
            "env_default": "*/10 * * * *",
            "task": "paperless_mail.tasks.process_mail_accounts",
            "options": {
                # 1 minute before default schedule sends again
                "expires": 9.0 * 60.0,
            },
        },
        {
            "name": "Train the classifier",
            "env_key": "PAPERLESS_TRAIN_TASK_CRON",
            # Default hourly at 5 minutes past the hour
            "env_default": "5 */1 * * *",
            "task": "documents.tasks.train_classifier",
            "options": {
                # 1 minute before default schedule sends again
                "expires": 59.0 * 60.0,
            },
        },
        {
            "name": "Optimize the index",
            "env_key": "PAPERLESS_INDEX_TASK_CRON",
            # Default daily at midnight
            "env_default": "0 0 * * *",
            "task": "documents.tasks.index_optimize",
            "options": {
                # 1 hour before default schedule sends again
                "expires": 23.0 * 60.0 * 60.0,
            },
        },
        {
            "name": "Perform sanity check",
            "env_key": "PAPERLESS_SANITY_TASK_CRON",
            # Default Sunday at 00:30
            "env_default": "30 0 * * sun",
            "task": "documents.tasks.sanity_check",
            "options": {
                # 1 hour before default schedule sends again
                "expires": ((7.0 * 24.0) - 1.0) * 60.0 * 60.0,
            },
        },
        {
            "name": "Empty trash",
            "env_key": "PAPERLESS_EMPTY_TRASH_TASK_CRON",
            # Default daily at 01:00
            "env_default": "0 1 * * *",
            "task": "documents.tasks.empty_trash",
            "options": {
                # 1 hour before default schedule sends again
                "expires": 23.0 * 60.0 * 60.0,
            },
        },
        {
            "name": "Check and run scheduled workflows",
            "env_key": "PAPERLESS_WORKFLOW_SCHEDULED_TASK_CRON",
            # Default hourly at 5 minutes past the hour
            "env_default": "5 */1 * * *",
            "task": "documents.tasks.check_scheduled_workflows",
            "options": {
                # 1 minute before default schedule sends again
                "expires": 59.0 * 60.0,
            },
        },
        {
            "name": "Rebuild LLM index",
            "env_key": "PAPERLESS_LLM_INDEX_TASK_CRON",
            # Default daily at 02:10
            "env_default": "10 2 * * *",
            "task": "documents.tasks.llmindex_index",
            "options": {
                # 1 hour before default schedule sends again
                "expires": 23.0 * 60.0 * 60.0,
            },
        },
        {
            "name": "Cleanup expired share link bundles",
            "env_key": "PAPERLESS_SHARE_LINK_BUNDLE_CLEANUP_CRON",
            # Default daily at 02:00
            "env_default": "0 2 * * *",
            "task": "documents.tasks.cleanup_expired_share_link_bundles",
            "options": {
                # 1 hour before default schedule sends again
                "expires": 23.0 * 60.0 * 60.0,
            },
        },
    ]
    for task in tasks:
        # Either get the environment setting or use the default
        value = os.getenv(task["env_key"], task["env_default"])
        # Don't add disabled tasks to the schedule
        if value == "disable":
            continue
        # I find https://crontab.guru/ super helpful
        # crontab(5) format
        #   - five time-and-date fields
        #   - separated by at least one blank
        minute, hour, day_month, month, day_week = value.split(" ")

        schedule[task["name"]] = {
            "task": task["task"],
            "schedule": crontab(minute, hour, day_week, day_month, month),
            "options": task["options"],
        }

    return schedule
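Any task can be retimed or dropped per deployment through its env key; the expected results below come from the `TestParseBeatSchedule` cases later in this diff:

import os

os.environ["PAPERLESS_EMAIL_TASK_CRON"] = "*/50 * * * mon"  # re-time mail checks
os.environ["PAPERLESS_INDEX_TASK_CRON"] = "disable"  # drop index optimization entirely

schedule = parse_beat_schedule()
assert "Optimize the index" not in schedule
assert schedule["Check all e-mail accounts"]["schedule"] == crontab(
    minute="*/50",
    day_of_week="mon",
)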
def parse_db_settings(data_dir: Path) -> dict[str, dict[str, Any]]:
    """Parse database settings from environment variables.
@@ -120,3 +300,48 @@ def parse_db_settings(data_dir: Path) -> dict[str, dict[str, Any]]:
    )

    return {"default": db_config}


def parse_dateparser_languages(languages: str | None) -> list[str]:
    language_list = languages.split("+") if languages else []
    # There is an unfixed issue in zh-Hant and zh-Hans locales in the dateparser lib.
    # See: https://github.com/scrapinghub/dateparser/issues/875
    for index, language in enumerate(language_list):
        if language.startswith("zh-") and "zh" not in language_list:
            logger.warning(
                f"Chinese locale detected: {language}. dateparser might fail to parse"
                f' some dates with this locale, so Chinese ("zh") will be used as a fallback.',
            )
            language_list.append("zh")

    return list(LocaleDataLoader().get_locale_map(locales=language_list))
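For example (expected values from the test table at the end of this diff; the sorted comparison is used there because `get_locale_map` controls the output order):

assert sorted(parse_dateparser_languages("fr+en")) == ["en", "fr"]
# Chinese variants get the plain "zh" fallback appended
assert sorted(parse_dateparser_languages("en+zh-Hans")) == ["en", "zh", "zh-Hans"]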
def parse_ignore_dates(
    env_ignore: str,
    date_order: str,
) -> set[datetime.date]:
    """
    If the PAPERLESS_IGNORE_DATES environment variable is set, parse the
    user provided string(s) into dates

    Args:
        env_ignore (str): The value of the environment variable, comma separated dates
        date_order (str): The format of the date strings.

    Returns:
        set[datetime.date]: The set of parsed date objects
    """
    import dateparser

    ignored_dates = set()
    for s in env_ignore.split(","):
        d = dateparser.parse(
            s,
            settings={
                "DATE_ORDER": date_order,
            },
        )
        if d:
            ignored_dates.add(d.date())
    return ignored_dates
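Usage mirrors the `TestParseIgnoreDates` cases below; `date_order` disambiguates purely numeric dates:

import datetime

assert parse_ignore_dates("1985-05-01,1991-12-05", "YMD") == {
    datetime.date(1985, 5, 1),
    datetime.date(1991, 12, 5),
}
assert parse_ignore_dates("11.01.10", "DMY") == {datetime.date(2010, 1, 11)}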
@@ -156,6 +156,108 @@ def parse_dict_from_str(
    return settings


def get_bool_from_env(key: str, default: str = "NO") -> bool:
    """
    Return a boolean value based on whatever the user has supplied in the
    environment based on whether the value "looks like" it's True or not.
    """
    return str_to_bool(os.getenv(key, default))


@overload
def get_float_from_env(key: str) -> float | None: ...


@overload
def get_float_from_env(key: str, default: None) -> float | None: ...


@overload
def get_float_from_env(key: str, default: float) -> float: ...


def get_float_from_env(key: str, default: float | None = None) -> float | None:
    """
    Return a float value based on the environment variable.
    If default is provided, returns that value when key is missing.
    If default is None, returns None when key is missing.
    """
    if key not in os.environ:
        return default

    return float(os.environ[key])
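The `@overload` stubs above are for type checkers only: they narrow the return type to `float` whenever a non-None default is supplied, while the single runtime implementation stays unchanged. A sketch of what a checker infers:

with_default = get_float_from_env("SOME_KEY", default=1.5)  # inferred: float
maybe_none = get_float_from_env("SOME_KEY")  # inferred: float | None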
@overload
def get_path_from_env(key: str) -> Path | None: ...


@overload
def get_path_from_env(key: str, default: None) -> Path | None: ...


@overload
def get_path_from_env(key: str, default: Path | str) -> Path: ...


def get_path_from_env(key: str, default: Path | str | None = None) -> Path | None:
    """
    Return a Path object based on the environment variable.
    If default is provided, returns that value when key is missing.
    If default is None, returns None when key is missing.
    """
    if key not in os.environ:
        return default if default is None else Path(default).resolve()

    return Path(os.environ[key]).resolve()


def get_list_from_env(
    key: str,
    separator: str = ",",
    default: list[T] | None = None,
    *,
    strip_whitespace: bool = True,
    remove_empty: bool = True,
    required: bool = False,
) -> list[str] | list[T]:
    """
    Get and parse a list from an environment variable or return a default.

    Args:
        key: Environment variable name
        separator: Character(s) to split on (default: ',')
        default: Default value to return if env var is not set or empty
        strip_whitespace: Whether to strip whitespace from each element
        remove_empty: Whether to remove empty strings from the result
        required: If True, raise an error when the env var is missing and no default provided

    Returns:
        List of strings or list of type-cast values, or default if env var is empty/None

    Raises:
        ValueError: If required=True and env var is missing and there is no default
    """
    # Get the environment variable value
    env_value = os.environ.get(key)

    # Handle required environment variables
    if required and env_value is None and default is None:
        raise ValueError(f"Required environment variable '{key}' is not set")

    if env_value:
        items = env_value.split(separator)
        if strip_whitespace:
            items = [item.strip() for item in items]
        if remove_empty:
            items = [item for item in items if item]
        return items
    elif default is not None:
        return default
    else:
        return []
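A behavior sketch matching the `TestGetListFromEnv` cases later in this diff:

import os

os.environ["LIST_VAR"] = "a, b ,,c"
assert get_list_from_env("LIST_VAR") == ["a", "b", "c"]
assert get_list_from_env("LIST_VAR", strip_whitespace=False, remove_empty=False) == [
    "a",
    " b ",
    "",
    "c",
]
# The default is returned as-is, so typed defaults survive
assert get_list_from_env("MISSING_VAR", default=[1, 2, 3]) == [1, 2, 3]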
def get_choice_from_env(
    env_key: str,
    choices: set[str],
@@ -1,10 +1,279 @@
import datetime
import os
from pathlib import Path
from typing import Any

import pytest
from celery.schedules import crontab
from pytest_mock import MockerFixture

from paperless.settings.custom import parse_beat_schedule
from paperless.settings.custom import parse_dateparser_languages
from paperless.settings.custom import parse_db_settings
from paperless.settings.custom import parse_hosting_settings
from paperless.settings.custom import parse_ignore_dates
from paperless.settings.custom import parse_redis_url


class TestRedisSocketConversion:
    @pytest.mark.parametrize(
        ("input_url", "expected"),
        [
            pytest.param(
                None,
                ("redis://localhost:6379", "redis://localhost:6379"),
                id="none_uses_default",
            ),
            pytest.param(
                "redis+socket:///run/redis/redis.sock",
                (
                    "redis+socket:///run/redis/redis.sock",
                    "unix:///run/redis/redis.sock",
                ),
                id="celery_style_socket",
            ),
            pytest.param(
                "unix:///run/redis/redis.sock",
                (
                    "redis+socket:///run/redis/redis.sock",
                    "unix:///run/redis/redis.sock",
                ),
                id="redis_py_style_socket",
            ),
            pytest.param(
                "redis+socket:///run/redis/redis.sock?virtual_host=5",
                (
                    "redis+socket:///run/redis/redis.sock?virtual_host=5",
                    "unix:///run/redis/redis.sock?db=5",
                ),
                id="celery_style_socket_with_db",
            ),
            pytest.param(
                "unix:///run/redis/redis.sock?db=10",
                (
                    "redis+socket:///run/redis/redis.sock?virtual_host=10",
                    "unix:///run/redis/redis.sock?db=10",
                ),
                id="redis_py_style_socket_with_db",
            ),
            pytest.param(
                "redis://myredishost:6379",
                ("redis://myredishost:6379", "redis://myredishost:6379"),
                id="host_with_port_unchanged",
            ),
            # Credentials in unix:// URL contain multiple colons (user:password@)
            # Regression test for https://github.com/paperless-ngx/paperless-ngx/pull/12239
            pytest.param(
                "unix://user:password@/run/redis/redis.sock",
                (
                    "redis+socket://user:password@/run/redis/redis.sock",
                    "unix://user:password@/run/redis/redis.sock",
                ),
                id="redis_py_style_socket_with_credentials",
            ),
            pytest.param(
                "redis+socket://user:password@/run/redis/redis.sock",
                (
                    "redis+socket://user:password@/run/redis/redis.sock",
                    "unix://user:password@/run/redis/redis.sock",
                ),
                id="celery_style_socket_with_credentials",
            ),
        ],
    )
    def test_redis_socket_parsing(
        self,
        input_url: str | None,
        expected: tuple[str, str],
    ) -> None:
        """
        GIVEN:
            - Various Redis connection URI formats
        WHEN:
            - The URI is parsed
        THEN:
            - Socket based URIs are translated
            - Non-socket URIs are unchanged
            - None provided uses default
        """
        result = parse_redis_url(input_url)
        assert expected == result


class TestParseHostingSettings:
    @pytest.mark.parametrize(
        ("env", "expected"),
        [
            pytest.param(
                {},
                (
                    None,
                    "/",
                    "/accounts/login/",
                    "/dashboard",
                    "/accounts/login/?loggedout=1",
                ),
                id="no_env_vars",
            ),
            pytest.param(
                {"PAPERLESS_FORCE_SCRIPT_NAME": "/paperless"},
                (
                    "/paperless",
                    "/paperless/",
                    "/paperless/accounts/login/",
                    "/paperless/dashboard",
                    "/paperless/accounts/login/?loggedout=1",
                ),
                id="force_script_name_only",
            ),
            pytest.param(
                {
                    "PAPERLESS_FORCE_SCRIPT_NAME": "/docs",
                    "PAPERLESS_LOGOUT_REDIRECT_URL": "/custom/logout",
                },
                (
                    "/docs",
                    "/docs/",
                    "/docs/accounts/login/",
                    "/docs/dashboard",
                    "/custom/logout",
                ),
                id="force_script_name_and_logout_redirect",
            ),
        ],
    )
    def test_parse_hosting_settings(
        self,
        mocker: MockerFixture,
        env: dict[str, str],
        expected: tuple[str | None, str, str, str, str],
    ) -> None:
        """Test parse_hosting_settings with various env configurations."""
        mocker.patch.dict(os.environ, env, clear=True)

        result = parse_hosting_settings()

        assert result == expected


def make_expected_schedule(
    overrides: dict[str, dict[str, Any]] | None = None,
    disabled: set[str] | None = None,
) -> dict[str, Any]:
    """
    Build the expected schedule with optional overrides and disabled tasks.
    """
    mail_expire = 9.0 * 60.0
    classifier_expire = 59.0 * 60.0
    index_expire = 23.0 * 60.0 * 60.0
    sanity_expire = ((7.0 * 24.0) - 1.0) * 60.0 * 60.0
    empty_trash_expire = 23.0 * 60.0 * 60.0
    workflow_expire = 59.0 * 60.0
    llm_index_expire = 23.0 * 60.0 * 60.0
    share_link_cleanup_expire = 23.0 * 60.0 * 60.0

    schedule: dict[str, Any] = {
        "Check all e-mail accounts": {
            "task": "paperless_mail.tasks.process_mail_accounts",
            "schedule": crontab(minute="*/10"),
            "options": {"expires": mail_expire},
        },
        "Train the classifier": {
            "task": "documents.tasks.train_classifier",
            "schedule": crontab(minute="5", hour="*/1"),
            "options": {"expires": classifier_expire},
        },
        "Optimize the index": {
            "task": "documents.tasks.index_optimize",
            "schedule": crontab(minute=0, hour=0),
            "options": {"expires": index_expire},
        },
        "Perform sanity check": {
            "task": "documents.tasks.sanity_check",
            "schedule": crontab(minute=30, hour=0, day_of_week="sun"),
            "options": {"expires": sanity_expire},
        },
        "Empty trash": {
            "task": "documents.tasks.empty_trash",
            "schedule": crontab(minute=0, hour="1"),
            "options": {"expires": empty_trash_expire},
        },
        "Check and run scheduled workflows": {
            "task": "documents.tasks.check_scheduled_workflows",
            "schedule": crontab(minute="5", hour="*/1"),
            "options": {"expires": workflow_expire},
        },
        "Rebuild LLM index": {
            "task": "documents.tasks.llmindex_index",
            "schedule": crontab(minute="10", hour="2"),
            "options": {"expires": llm_index_expire},
        },
        "Cleanup expired share link bundles": {
            "task": "documents.tasks.cleanup_expired_share_link_bundles",
            "schedule": crontab(minute=0, hour="2"),
            "options": {"expires": share_link_cleanup_expire},
        },
    }

    overrides = overrides or {}
    disabled = disabled or set()

    for key, val in overrides.items():
        schedule[key] = {**schedule.get(key, {}), **val}

    for key in disabled:
        schedule.pop(key, None)

    return schedule


class TestParseBeatSchedule:
    @pytest.mark.parametrize(
        ("env", "expected"),
        [
            pytest.param({}, make_expected_schedule(), id="defaults"),
            pytest.param(
                {"PAPERLESS_EMAIL_TASK_CRON": "*/50 * * * mon"},
                make_expected_schedule(
                    overrides={
                        "Check all e-mail accounts": {
                            "schedule": crontab(minute="*/50", day_of_week="mon"),
                        },
                    },
                ),
                id="email-changed",
            ),
            pytest.param(
                {"PAPERLESS_INDEX_TASK_CRON": "disable"},
                make_expected_schedule(disabled={"Optimize the index"}),
                id="index-disabled",
            ),
            pytest.param(
                {
                    "PAPERLESS_EMAIL_TASK_CRON": "disable",
                    "PAPERLESS_TRAIN_TASK_CRON": "disable",
                    "PAPERLESS_SANITY_TASK_CRON": "disable",
                    "PAPERLESS_INDEX_TASK_CRON": "disable",
                    "PAPERLESS_EMPTY_TRASH_TASK_CRON": "disable",
                    "PAPERLESS_WORKFLOW_SCHEDULED_TASK_CRON": "disable",
                    "PAPERLESS_LLM_INDEX_TASK_CRON": "disable",
                    "PAPERLESS_SHARE_LINK_BUNDLE_CLEANUP_CRON": "disable",
                },
                {},
                id="all-disabled",
            ),
        ],
    )
    def test_parse_beat_schedule(
        self,
        env: dict[str, str],
        expected: dict[str, Any],
        mocker: MockerFixture,
    ) -> None:
        mocker.patch.dict(os.environ, env, clear=False)
        schedule = parse_beat_schedule()
        assert schedule == expected


class TestParseDbSettings:
@@ -264,3 +533,85 @@ class TestParseDbSettings:
        settings = parse_db_settings(tmp_path)

        assert settings == expected_database_settings


class TestParseIgnoreDates:
    """Tests the parsing of the PAPERLESS_IGNORE_DATES setting value."""

    def test_no_ignore_dates_set(self) -> None:
        """
        GIVEN:
            - No ignore dates are set
        THEN:
            - No ignore dates are parsed
        """
        assert parse_ignore_dates("", "YMD") == set()

    @pytest.mark.parametrize(
        ("env_str", "date_format", "expected"),
        [
            pytest.param(
                "1985-05-01",
                "YMD",
                {datetime.date(1985, 5, 1)},
                id="single-ymd",
            ),
            pytest.param(
                "1985-05-01,1991-12-05",
                "YMD",
                {datetime.date(1985, 5, 1), datetime.date(1991, 12, 5)},
                id="multiple-ymd",
            ),
            pytest.param(
                "2010-12-13",
                "YMD",
                {datetime.date(2010, 12, 13)},
                id="single-ymd-2",
            ),
            pytest.param(
                "11.01.10",
                "DMY",
                {datetime.date(2010, 1, 11)},
                id="single-dmy",
            ),
            pytest.param(
                "11.01.2001,15-06-1996",
                "DMY",
                {datetime.date(2001, 1, 11), datetime.date(1996, 6, 15)},
                id="multiple-dmy",
            ),
        ],
    )
    def test_ignore_dates_parsed(
        self,
        env_str: str,
        date_format: str,
        expected: set[datetime.date],
    ) -> None:
        """
        GIVEN:
            - Ignore dates are set per certain inputs
        THEN:
            - All ignore dates are parsed
        """
        assert parse_ignore_dates(env_str, date_format) == expected


@pytest.mark.parametrize(
    ("languages", "expected"),
    [
        ("de", ["de"]),
        ("zh", ["zh"]),
        ("fr+en", ["fr", "en"]),
        # Locales must be supported
        ("en-001+fr-CA", ["en-001", "fr-CA"]),
        ("en-001+fr", ["en-001", "fr"]),
        # Special case for Chinese: variants seem to miss some dates,
        # so we always add "zh" as a fallback.
        ("en+zh-Hans-HK", ["en", "zh-Hans-HK", "zh"]),
        ("en+zh-Hans", ["en", "zh-Hans", "zh"]),
        ("en+zh-Hans+zh-Hant", ["en", "zh-Hans", "zh-Hant", "zh"]),
    ],
)
def test_parse_dateparser_languages(languages: str, expected: list[str]) -> None:
    assert sorted(parse_dateparser_languages(languages)) == sorted(expected)
@@ -4,8 +4,12 @@ from pathlib import Path
import pytest
from pytest_mock import MockerFixture

from paperless.settings.parsers import get_bool_from_env
from paperless.settings.parsers import get_choice_from_env
from paperless.settings.parsers import get_float_from_env
from paperless.settings.parsers import get_int_from_env
from paperless.settings.parsers import get_list_from_env
from paperless.settings.parsers import get_path_from_env
from paperless.settings.parsers import parse_dict_from_str
from paperless.settings.parsers import str_to_bool

@@ -205,6 +209,29 @@ class TestParseDictFromString:
        assert isinstance(result["database"]["port"], int)


class TestGetBoolFromEnv:
    def test_existing_env_var(self, mocker):
        """Test that an existing environment variable is read and converted."""
        mocker.patch.dict(os.environ, {"TEST_VAR": "true"})
        assert get_bool_from_env("TEST_VAR") is True

    def test_missing_env_var_uses_default_no(self, mocker):
        """Test that a missing environment variable uses default 'NO' and returns False."""
        mocker.patch.dict(os.environ, {}, clear=True)
        assert get_bool_from_env("MISSING_VAR") is False

    def test_missing_env_var_with_explicit_default(self, mocker):
        """Test that a missing environment variable uses the provided default."""
        mocker.patch.dict(os.environ, {}, clear=True)
        assert get_bool_from_env("MISSING_VAR", default="yes") is True

    def test_invalid_value_raises_error(self, mocker):
        """Test that an invalid value raises ValueError (delegates to str_to_bool)."""
        mocker.patch.dict(os.environ, {"INVALID_VAR": "maybe"})
        with pytest.raises(ValueError):
            get_bool_from_env("INVALID_VAR")


class TestGetIntFromEnv:
    @pytest.mark.parametrize(
        ("env_value", "expected"),
@@ -259,6 +286,199 @@ class TestGetIntFromEnv:
            get_int_from_env("INVALID_INT")


class TestGetFloatFromEnv:
    @pytest.mark.parametrize(
        ("env_value", "expected"),
        [
            pytest.param("3.14", 3.14, id="pi"),
            pytest.param("42", 42.0, id="int_as_float"),
            pytest.param("-2.5", -2.5, id="negative"),
            pytest.param("0.0", 0.0, id="zero_float"),
            pytest.param("0", 0.0, id="zero_int"),
            pytest.param("1.5e2", 150.0, id="sci_positive"),
            pytest.param("1e-3", 0.001, id="sci_negative"),
            pytest.param("-1.23e4", -12300.0, id="sci_large"),
        ],
    )
    def test_existing_env_var_valid_floats(self, mocker, env_value, expected):
        """Test that existing environment variables with valid floats return correct values."""
        mocker.patch.dict(os.environ, {"FLOAT_VAR": env_value})
        assert get_float_from_env("FLOAT_VAR") == expected

    @pytest.mark.parametrize(
        ("default", "expected"),
        [
            pytest.param(3.14, 3.14, id="pi_default"),
            pytest.param(0.0, 0.0, id="zero_default"),
            pytest.param(-2.5, -2.5, id="negative_default"),
            pytest.param(None, None, id="none_default"),
        ],
    )
    def test_missing_env_var_with_defaults(self, mocker, default, expected):
        """Test that missing environment variables return provided defaults."""
        mocker.patch.dict(os.environ, {}, clear=True)
        assert get_float_from_env("MISSING_VAR", default=default) == expected

    def test_missing_env_var_no_default(self, mocker):
        """Test that missing environment variable with no default returns None."""
        mocker.patch.dict(os.environ, {}, clear=True)
        assert get_float_from_env("MISSING_VAR") is None

    @pytest.mark.parametrize(
        "invalid_value",
        [
            pytest.param("not_a_number", id="text"),
            pytest.param("42.5.0", id="double_decimal"),
            pytest.param("42a", id="alpha_suffix"),
            pytest.param("", id="empty"),
            pytest.param(" ", id="whitespace"),
            pytest.param("true", id="boolean"),
            pytest.param("1.2.3", id="triple_decimal"),
        ],
    )
    def test_invalid_float_values_raise_error(self, mocker, invalid_value):
        """Test that invalid float values raise ValueError."""
        mocker.patch.dict(os.environ, {"INVALID_FLOAT": invalid_value})
        with pytest.raises(ValueError):
            get_float_from_env("INVALID_FLOAT")


class TestGetPathFromEnv:
    @pytest.mark.parametrize(
        "env_value",
        [
            pytest.param("/tmp/test", id="absolute"),
            pytest.param("relative/path", id="relative"),
            pytest.param("/path/with spaces/file.txt", id="spaces"),
            pytest.param(".", id="current_dir"),
            pytest.param("..", id="parent_dir"),
            pytest.param("/", id="root"),
        ],
    )
    def test_existing_env_var_paths(self, mocker, env_value):
        """Test that existing environment variables with paths return resolved Path objects."""
        mocker.patch.dict(os.environ, {"PATH_VAR": env_value})
        result = get_path_from_env("PATH_VAR")
        assert isinstance(result, Path)
        assert result == Path(env_value).resolve()

    def test_missing_env_var_no_default(self, mocker):
        """Test that missing environment variable with no default returns None."""
        mocker.patch.dict(os.environ, {}, clear=True)
        assert get_path_from_env("MISSING_VAR") is None

    def test_missing_env_var_with_none_default(self, mocker):
        """Test that missing environment variable with None default returns None."""
        mocker.patch.dict(os.environ, {}, clear=True)
        assert get_path_from_env("MISSING_VAR", default=None) is None

    @pytest.mark.parametrize(
        "default_path_str",
        [
            pytest.param("/default/path", id="absolute_default"),
            pytest.param("relative/default", id="relative_default"),
            pytest.param(".", id="current_default"),
        ],
    )
    def test_missing_env_var_with_path_defaults(self, mocker, default_path_str):
        """Test that missing environment variables return resolved default Path objects."""
        mocker.patch.dict(os.environ, {}, clear=True)
        default_path = Path(default_path_str)
        result = get_path_from_env("MISSING_VAR", default=default_path)
        assert isinstance(result, Path)
        assert result == default_path.resolve()

    def test_relative_paths_are_resolved(self, mocker):
        """Test that relative paths are properly resolved to absolute paths."""
        mocker.patch.dict(os.environ, {"REL_PATH": "relative/path"})
        result = get_path_from_env("REL_PATH")
        assert result is not None
        assert result.is_absolute()


class TestGetListFromEnv:
    @pytest.mark.parametrize(
        ("env_value", "expected"),
        [
            pytest.param("a,b,c", ["a", "b", "c"], id="basic_comma_separated"),
            pytest.param("single", ["single"], id="single_element"),
            pytest.param("", [], id="empty_string"),
            pytest.param("a, b , c", ["a", "b", "c"], id="whitespace_trimmed"),
            pytest.param("a,,b,c", ["a", "b", "c"], id="empty_elements_removed"),
        ],
    )
    def test_existing_env_var_basic_parsing(self, mocker, env_value, expected):
        """Test that existing environment variables are parsed correctly."""
        mocker.patch.dict(os.environ, {"LIST_VAR": env_value})
        result = get_list_from_env("LIST_VAR")
        assert result == expected

    @pytest.mark.parametrize(
        ("separator", "env_value", "expected"),
        [
            pytest.param("|", "a|b|c", ["a", "b", "c"], id="pipe_separator"),
            pytest.param(":", "a:b:c", ["a", "b", "c"], id="colon_separator"),
            pytest.param(";", "a;b;c", ["a", "b", "c"], id="semicolon_separator"),
        ],
    )
    def test_custom_separators(self, mocker, separator, env_value, expected):
        """Test that custom separators work correctly."""
        mocker.patch.dict(os.environ, {"LIST_VAR": env_value})
        result = get_list_from_env("LIST_VAR", separator=separator)
        assert result == expected

    @pytest.mark.parametrize(
        ("default", "expected"),
        [
            pytest.param(
                ["default1", "default2"],
                ["default1", "default2"],
                id="string_list_default",
            ),
            pytest.param([1, 2, 3], [1, 2, 3], id="int_list_default"),
            pytest.param(None, [], id="none_default_returns_empty_list"),
        ],
    )
    def test_missing_env_var_with_defaults(self, mocker, default, expected):
        """Test that missing environment variables return provided defaults."""
        mocker.patch.dict(os.environ, {}, clear=True)
        result = get_list_from_env("MISSING_VAR", default=default)
        assert result == expected

    def test_missing_env_var_no_default(self, mocker):
        """Test that missing environment variable with no default returns empty list."""
        mocker.patch.dict(os.environ, {}, clear=True)
        result = get_list_from_env("MISSING_VAR")
        assert result == []

    def test_required_env_var_missing_raises_error(self, mocker):
        """Test that missing required environment variable raises ValueError."""
        mocker.patch.dict(os.environ, {}, clear=True)
        with pytest.raises(
            ValueError,
            match="Required environment variable 'REQUIRED_VAR' is not set",
        ):
            get_list_from_env("REQUIRED_VAR", required=True)

    def test_required_env_var_with_default_does_not_raise(self, mocker):
        """Test that required environment variable with default does not raise error."""
        mocker.patch.dict(os.environ, {}, clear=True)
        result = get_list_from_env("REQUIRED_VAR", default=["default"], required=True)
        assert result == ["default"]

    def test_strip_whitespace_false(self, mocker):
        """Test that whitespace is preserved when strip_whitespace=False."""
        mocker.patch.dict(os.environ, {"LIST_VAR": " a , b , c "})
        result = get_list_from_env("LIST_VAR", strip_whitespace=False)
        assert result == [" a ", " b ", " c "]

    def test_remove_empty_false(self, mocker):
        """Test that empty elements are preserved when remove_empty=False."""
        mocker.patch.dict(os.environ, {"LIST_VAR": "a,,b,,c"})
        result = get_list_from_env("LIST_VAR", remove_empty=False)
        assert result == ["a", "", "b", "", "c"]


class TestGetEnvChoice:
    @pytest.fixture
    def valid_choices(self) -> set[str]:
@@ -394,21 +614,3 @@ class TestGetEnvChoice:
        result = get_choice_from_env("TEST_ENV", large_choices)

        assert result == "option_50"

    def test_different_env_keys(
        self,
        mocker: MockerFixture,
        valid_choices: set[str],
    ) -> None:
        """Test function works with different environment variable keys."""
        test_cases = [
            ("DJANGO_ENV", "development"),
            ("DATABASE_BACKEND", "staging"),
            ("LOG_LEVEL", "production"),
            ("APP_MODE", "development"),
        ]

        for env_key, env_value in test_cases:
            mocker.patch.dict("os.environ", {env_key: env_value})
            result = get_choice_from_env(env_key, valid_choices)
            assert result == env_value
56
src/paperless/tests/settings/test_settings.py
Normal file
@@ -0,0 +1,56 @@
import os
from unittest import TestCase
from unittest import mock

from paperless.settings import _parse_paperless_url
from paperless.settings import default_threads_per_worker


class TestThreadCalculation(TestCase):
    def test_workers_threads(self) -> None:
        """
        GIVEN:
            - Certain CPU counts
        WHEN:
            - Threads per worker is calculated
        THEN:
            - Threads per worker less than or equal to CPU count
            - At least 1 thread per worker
        """
        default_workers = 1

        for i in range(1, 64):
            with mock.patch(
                "paperless.settings.multiprocessing.cpu_count",
            ) as cpu_count:
                cpu_count.return_value = i

                default_threads = default_threads_per_worker(default_workers)

                self.assertGreaterEqual(default_threads, 1)

                self.assertLessEqual(default_workers * default_threads, i)


class TestPaperlessURLSettings(TestCase):
    def test_paperless_url(self) -> None:
        """
        GIVEN:
            - PAPERLESS_URL is set
        WHEN:
            - The URL is parsed
        THEN:
            - The URL is returned and present in related settings
        """
        with mock.patch.dict(
            os.environ,
            {
                "PAPERLESS_URL": "https://example.com",
            },
        ):
            url = _parse_paperless_url()
            self.assertEqual("https://example.com", url)
            from django.conf import settings

            self.assertIn(url, settings.CSRF_TRUSTED_ORIGINS)
            self.assertIn(url, settings.CORS_ALLOWED_ORIGINS)
@@ -1,107 +1,100 @@
from unittest import mock
import logging

import pytest
from allauth.account.adapter import get_adapter
from allauth.core import context
from allauth.socialaccount.adapter import get_adapter as get_social_adapter
from django.conf import settings
from django.contrib.auth.models import AnonymousUser
from django.contrib.auth.models import Group
from django.contrib.auth.models import User
from django.forms import ValidationError
from django.http import HttpRequest
from django.test import TestCase
from django.test import override_settings
from django.urls import reverse
from pytest_django.fixtures import SettingsWrapper
from pytest_mock import MockerFixture
from rest_framework.authtoken.models import Token

from paperless.adapter import DrfTokenStrategy


class TestCustomAccountAdapter(TestCase):
    def test_is_open_for_signup(self) -> None:
@pytest.mark.django_db
class TestCustomAccountAdapter:
    def test_is_open_for_signup(self, settings: SettingsWrapper) -> None:
        adapter = get_adapter()

        # With no accounts, signups should be allowed
        self.assertTrue(adapter.is_open_for_signup(None))
        assert adapter.is_open_for_signup(None)

        User.objects.create_user("testuser")

        # Test when ACCOUNT_ALLOW_SIGNUPS is True
        settings.ACCOUNT_ALLOW_SIGNUPS = True
        self.assertTrue(adapter.is_open_for_signup(None))
        assert adapter.is_open_for_signup(None)

        # Test when ACCOUNT_ALLOW_SIGNUPS is False
        settings.ACCOUNT_ALLOW_SIGNUPS = False
        self.assertFalse(adapter.is_open_for_signup(None))
        assert not adapter.is_open_for_signup(None)

    def test_is_safe_url(self) -> None:
    def test_is_safe_url(self, settings: SettingsWrapper) -> None:
        request = HttpRequest()
        request.get_host = mock.Mock(return_value="example.com")
        request.get_host = lambda: "example.com"
        with context.request_context(request):
            adapter = get_adapter()
            with override_settings(ALLOWED_HOSTS=["*"]):
                # True because request host is same
                url = "https://example.com"
                self.assertTrue(adapter.is_safe_url(url))

                url = "https://evil.com"
            settings.ALLOWED_HOSTS = ["*"]
            # True because request host is same
            assert adapter.is_safe_url("https://example.com")
            # False despite wildcard because request host is different
                self.assertFalse(adapter.is_safe_url(url))
            assert not adapter.is_safe_url("https://evil.com")

            settings.ALLOWED_HOSTS = ["example.com"]
            url = "https://example.com"
            # True because request host is same
            self.assertTrue(adapter.is_safe_url(url))
            assert adapter.is_safe_url("https://example.com")

            settings.ALLOWED_HOSTS = ["*", "example.com"]
            url = "//evil.com"
            # False because request host is not in allowed hosts
            self.assertFalse(adapter.is_safe_url(url))
            assert not adapter.is_safe_url("//evil.com")

    @mock.patch("allauth.core.internal.ratelimit.consume", return_value=True)
    def test_pre_authenticate(self, mock_consume) -> None:
    def test_pre_authenticate(
        self,
        settings: SettingsWrapper,
        mocker: MockerFixture,
    ) -> None:
        mocker.patch("allauth.core.internal.ratelimit.consume", return_value=True)
        adapter = get_adapter()
        request = HttpRequest()
        request.get_host = mock.Mock(return_value="example.com")
        request.get_host = lambda: "example.com"

        settings.DISABLE_REGULAR_LOGIN = False
        adapter.pre_authenticate(request)

        settings.DISABLE_REGULAR_LOGIN = True
        with self.assertRaises(ValidationError):
        with pytest.raises(ValidationError):
            adapter.pre_authenticate(request)

    def test_get_reset_password_from_key_url(self) -> None:
    def test_get_reset_password_from_key_url(self, settings: SettingsWrapper) -> None:
        request = HttpRequest()
        request.get_host = mock.Mock(return_value="foo.org")
        request.get_host = lambda: "foo.org"
        with context.request_context(request):
            adapter = get_adapter()

            # Test when PAPERLESS_URL is None
            with override_settings(
                PAPERLESS_URL=None,
                ACCOUNT_DEFAULT_HTTP_PROTOCOL="https",
            ):
                expected_url = f"https://foo.org{reverse('account_reset_password_from_key', kwargs={'uidb36': 'UID', 'key': 'KEY'})}"
                self.assertEqual(
                    adapter.get_reset_password_from_key_url("UID-KEY"),
                    expected_url,
                )
            settings.PAPERLESS_URL = None
            settings.ACCOUNT_DEFAULT_HTTP_PROTOCOL = "https"
            expected_url = f"https://foo.org{reverse('account_reset_password_from_key', kwargs={'uidb36': 'UID', 'key': 'KEY'})}"
            assert adapter.get_reset_password_from_key_url("UID-KEY") == expected_url

            # Test when PAPERLESS_URL is not None
            with override_settings(PAPERLESS_URL="https://bar.com"):
                expected_url = f"https://bar.com{reverse('account_reset_password_from_key', kwargs={'uidb36': 'UID', 'key': 'KEY'})}"
                self.assertEqual(
                    adapter.get_reset_password_from_key_url("UID-KEY"),
                    expected_url,
                )
            settings.PAPERLESS_URL = "https://bar.com"
            expected_url = f"https://bar.com{reverse('account_reset_password_from_key', kwargs={'uidb36': 'UID', 'key': 'KEY'})}"
            assert adapter.get_reset_password_from_key_url("UID-KEY") == expected_url

    @override_settings(ACCOUNT_DEFAULT_GROUPS=["group1", "group2"])
    def test_save_user_adds_groups(self) -> None:
    def test_save_user_adds_groups(
        self,
        settings: SettingsWrapper,
        mocker: MockerFixture,
    ) -> None:
        settings.ACCOUNT_DEFAULT_GROUPS = ["group1", "group2"]
        Group.objects.create(name="group1")
        user = User.objects.create_user("testuser")
        adapter = get_adapter()
        form = mock.Mock(
        form = mocker.MagicMock(
            cleaned_data={
                "username": "testuser",
                "email": "user@example.com",
@@ -110,88 +103,81 @@ class TestCustomAccountAdapter(TestCase):

        user = adapter.save_user(HttpRequest(), user, form, commit=True)

        self.assertEqual(user.groups.count(), 1)
        self.assertTrue(user.groups.filter(name="group1").exists())
        self.assertFalse(user.groups.filter(name="group2").exists())
        assert user.groups.count() == 1
        assert user.groups.filter(name="group1").exists()
        assert not user.groups.filter(name="group2").exists()

    def test_fresh_install_save_creates_superuser(self) -> None:
    def test_fresh_install_save_creates_superuser(self, mocker: MockerFixture) -> None:
        adapter = get_adapter()
        form = mock.Mock(
        form = mocker.MagicMock(
            cleaned_data={
                "username": "testuser",
                "email": "user@paperless-ngx.com",
            },
        )
        user = adapter.save_user(HttpRequest(), User(), form, commit=True)
        self.assertTrue(user.is_superuser)
        assert user.is_superuser

        # Next time, it should not create a superuser
        form = mock.Mock(
        form = mocker.MagicMock(
            cleaned_data={
                "username": "testuser2",
                "email": "user2@paperless-ngx.com",
            },
        )
        user2 = adapter.save_user(HttpRequest(), User(), form, commit=True)
        self.assertFalse(user2.is_superuser)
        assert not user2.is_superuser


class TestCustomSocialAccountAdapter(TestCase):
    def test_is_open_for_signup(self) -> None:
class TestCustomSocialAccountAdapter:
    @pytest.mark.django_db
    def test_is_open_for_signup(self, settings: SettingsWrapper) -> None:
        adapter = get_social_adapter()

        # Test when SOCIALACCOUNT_ALLOW_SIGNUPS is True
        settings.SOCIALACCOUNT_ALLOW_SIGNUPS = True
        self.assertTrue(adapter.is_open_for_signup(None, None))
        assert adapter.is_open_for_signup(None, None)

        # Test when SOCIALACCOUNT_ALLOW_SIGNUPS is False
        settings.SOCIALACCOUNT_ALLOW_SIGNUPS = False
        self.assertFalse(adapter.is_open_for_signup(None, None))
        assert not adapter.is_open_for_signup(None, None)

    def test_get_connect_redirect_url(self) -> None:
        adapter = get_social_adapter()
        request = None
        socialaccount = None
        assert adapter.get_connect_redirect_url(None, None) == reverse("base")

        # Test the default URL
        expected_url = reverse("base")
        self.assertEqual(
            adapter.get_connect_redirect_url(request, socialaccount),
            expected_url,
        )

    @override_settings(SOCIAL_ACCOUNT_DEFAULT_GROUPS=["group1", "group2"])
    def test_save_user_adds_groups(self) -> None:
    @pytest.mark.django_db
    def test_save_user_adds_groups(
        self,
        settings: SettingsWrapper,
        mocker: MockerFixture,
    ) -> None:
        settings.SOCIAL_ACCOUNT_DEFAULT_GROUPS = ["group1", "group2"]
        Group.objects.create(name="group1")
        adapter = get_social_adapter()
        request = HttpRequest()
        user = User.objects.create_user("testuser")
        sociallogin = mock.Mock(
            user=user,
        )
        sociallogin = mocker.MagicMock(user=user)

        user = adapter.save_user(request, sociallogin, None)
        user = adapter.save_user(HttpRequest(), sociallogin, None)

        self.assertEqual(user.groups.count(), 1)
        self.assertTrue(user.groups.filter(name="group1").exists())
        self.assertFalse(user.groups.filter(name="group2").exists())
        assert user.groups.count() == 1
        assert user.groups.filter(name="group1").exists()
        assert not user.groups.filter(name="group2").exists()

    def test_error_logged_on_authentication_error(self) -> None:
    def test_error_logged_on_authentication_error(
        self,
        caplog: pytest.LogCaptureFixture,
    ) -> None:
        adapter = get_social_adapter()
        request = HttpRequest()
        with self.assertLogs("paperless.auth", level="INFO") as log_cm:
        with caplog.at_level(logging.INFO, logger="paperless.auth"):
            adapter.on_authentication_error(
                request,
                HttpRequest(),
                provider="test-provider",
                error="Error",
                exception="Test authentication error",
            )
        self.assertTrue(
            any("Test authentication error" in message for message in log_cm.output),
        )
        assert any("Test authentication error" in msg for msg in caplog.messages)


class TestDrfTokenStrategy(TestCase):
@pytest.mark.django_db
class TestDrfTokenStrategy:
    def test_create_access_token_creates_new_token(self) -> None:
        """
        GIVEN:
@@ -201,7 +187,6 @@ class TestDrfTokenStrategy(TestCase):
        THEN:
            - A new token is created and its key is returned
        """

        user = User.objects.create_user("testuser")
        request = HttpRequest()
        request.user = user
@@ -209,13 +194,9 @@ class TestDrfTokenStrategy(TestCase):
        strategy = DrfTokenStrategy()
        token_key = strategy.create_access_token(request)

        # Verify a token was created
        self.assertIsNotNone(token_key)
        self.assertTrue(Token.objects.filter(user=user).exists())

        # Verify the returned key matches the created token
        token = Token.objects.get(user=user)
        self.assertEqual(token_key, token.key)
        assert token_key is not None
        assert Token.objects.filter(user=user).exists()
        assert token_key == Token.objects.get(user=user).key

    def test_create_access_token_returns_existing_token(self) -> None:
        """
@@ -226,7 +207,6 @@ class TestDrfTokenStrategy(TestCase):
        THEN:
            - The same token key is returned (no new token created)
        """

        user = User.objects.create_user("testuser")
        existing_token = Token.objects.create(user=user)

@@ -236,11 +216,8 @@ class TestDrfTokenStrategy(TestCase):
        strategy = DrfTokenStrategy()
        token_key = strategy.create_access_token(request)

        # Verify the existing token key is returned
        self.assertEqual(token_key, existing_token.key)

        # Verify only one token exists (no duplicate created)
        self.assertEqual(Token.objects.filter(user=user).count(), 1)
        assert token_key == existing_token.key
        assert Token.objects.filter(user=user).count() == 1

    def test_create_access_token_returns_none_for_unauthenticated_user(self) -> None:
        """
@@ -251,12 +228,11 @@ class TestDrfTokenStrategy(TestCase):
        THEN:
            - None is returned and no token is created
        """

        request = HttpRequest()
        request.user = AnonymousUser()

        strategy = DrfTokenStrategy()
        token_key = strategy.create_access_token(request)

        self.assertIsNone(token_key)
        self.assertEqual(Token.objects.count(), 0)
        assert token_key is None
        assert Token.objects.count() == 0
@@ -1,16 +1,15 @@
|
||||
import os
|
||||
from collections.abc import Callable
|
||||
from dataclasses import dataclass
|
||||
from pathlib import Path
|
||||
from unittest import mock
|
||||
|
||||
import pytest
|
||||
from django.core.checks import Error
|
||||
from django.core.checks import Warning
|
||||
from django.test import TestCase
|
||||
from django.test import override_settings
|
||||
from pytest_django.fixtures import SettingsWrapper
|
||||
from pytest_mock import MockerFixture
|
||||
|
||||
from documents.tests.utils import DirectoriesMixin
|
||||
from documents.tests.utils import FileSystemAssertsMixin
|
||||
from paperless.checks import audit_log_check
|
||||
from paperless.checks import binaries_check
|
||||
from paperless.checks import check_deprecated_db_settings
|
||||
@@ -20,54 +19,84 @@ from paperless.checks import paths_check
|
||||
from paperless.checks import settings_values_check
|
||||
|
||||
|
||||
class TestChecks(DirectoriesMixin, TestCase):
|
||||
def test_binaries(self) -> None:
|
||||
self.assertEqual(binaries_check(None), [])
|
||||
@dataclass(frozen=True, slots=True)
|
||||
class PaperlessTestDirs:
|
||||
data_dir: Path
|
||||
media_dir: Path
|
||||
consumption_dir: Path
|
||||
|
||||
@override_settings(CONVERT_BINARY="uuuhh")
|
||||
def test_binaries_fail(self) -> None:
|
||||
self.assertEqual(len(binaries_check(None)), 1)
|
||||
|
||||
def test_paths_check(self) -> None:
|
||||
self.assertEqual(paths_check(None), [])
|
||||
# TODO: consolidate with documents/tests/conftest.py PaperlessDirs/paperless_dirs
|
||||
# once the paperless and documents test suites are ready to share fixtures.
|
||||
@pytest.fixture()
|
||||
def directories(tmp_path: Path, settings: SettingsWrapper) -> PaperlessTestDirs:
|
||||
data_dir = tmp_path / "data"
|
||||
media_dir = tmp_path / "media"
|
||||
consumption_dir = tmp_path / "consumption"
|
||||
|
||||
@override_settings(
|
||||
MEDIA_ROOT=Path("uuh"),
|
||||
DATA_DIR=Path("whatever"),
|
||||
CONSUMPTION_DIR=Path("idontcare"),
|
||||
for d in (data_dir, media_dir, consumption_dir):
|
||||
d.mkdir()
|
||||
|
||||
settings.DATA_DIR = data_dir
|
||||
settings.MEDIA_ROOT = media_dir
|
||||
settings.CONSUMPTION_DIR = consumption_dir
|
||||
|
||||
return PaperlessTestDirs(
|
||||
data_dir=data_dir,
|
||||
media_dir=media_dir,
|
||||
consumption_dir=consumption_dir,
|
||||
)
|
||||
def test_paths_check_dont_exist(self) -> None:
|
||||
msgs = paths_check(None)
|
||||
self.assertEqual(len(msgs), 3, str(msgs))
|
||||
|
||||
for msg in msgs:
|
||||
self.assertTrue(msg.msg.endswith("is set but doesn't exist."))
|
||||
|
||||
def test_paths_check_no_access(self) -> None:
|
||||
Path(self.dirs.data_dir).chmod(0o000)
|
||||
Path(self.dirs.media_dir).chmod(0o000)
|
||||
Path(self.dirs.consumption_dir).chmod(0o000)
|
||||
class TestChecks:
|
||||
def test_binaries(self) -> None:
|
||||
assert binaries_check(None) == []
|
||||
|
||||
self.addCleanup(os.chmod, self.dirs.data_dir, 0o777)
|
||||
self.addCleanup(os.chmod, self.dirs.media_dir, 0o777)
|
||||
self.addCleanup(os.chmod, self.dirs.consumption_dir, 0o777)
|
||||
def test_binaries_fail(self, settings: SettingsWrapper) -> None:
|
||||
settings.CONVERT_BINARY = "uuuhh"
|
||||
assert len(binaries_check(None)) == 1
|
||||
|
||||
@pytest.mark.usefixtures("directories")
|
||||
def test_paths_check(self) -> None:
|
||||
assert paths_check(None) == []
|
||||
|
||||
def test_paths_check_dont_exist(self, settings: SettingsWrapper) -> None:
|
||||
settings.MEDIA_ROOT = Path("uuh")
|
||||
settings.DATA_DIR = Path("whatever")
|
||||
settings.CONSUMPTION_DIR = Path("idontcare")
|
||||
|
||||
msgs = paths_check(None)
|
||||
self.assertEqual(len(msgs), 3)
|
||||
|
||||
assert len(msgs) == 3, str(msgs)
|
||||
for msg in msgs:
|
||||
self.assertTrue(msg.msg.endswith("is not writeable"))
|
||||
assert msg.msg.endswith("is set but doesn't exist.")
|
||||
|
||||
@override_settings(DEBUG=False)
|
||||
def test_debug_disabled(self) -> None:
|
||||
self.assertEqual(debug_mode_check(None), [])
|
||||
def test_paths_check_no_access(self, directories: PaperlessTestDirs) -> None:
|
||||
directories.data_dir.chmod(0o000)
|
||||
directories.media_dir.chmod(0o000)
|
||||
directories.consumption_dir.chmod(0o000)
|
||||
|
||||
@override_settings(DEBUG=True)
|
||||
def test_debug_enabled(self) -> None:
|
||||
self.assertEqual(len(debug_mode_check(None)), 1)
|
||||
try:
|
||||
msgs = paths_check(None)
|
||||
finally:
|
||||
directories.data_dir.chmod(0o777)
|
||||
directories.media_dir.chmod(0o777)
|
||||
directories.consumption_dir.chmod(0o777)
|
||||
|
||||
assert len(msgs) == 3
|
||||
for msg in msgs:
|
||||
assert msg.msg.endswith("is not writeable")
|
||||
|
||||
def test_debug_disabled(self, settings: SettingsWrapper) -> None:
|
||||
settings.DEBUG = False
|
||||
assert debug_mode_check(None) == []
|
||||
|
||||
def test_debug_enabled(self, settings: SettingsWrapper) -> None:
|
||||
settings.DEBUG = True
|
||||
assert len(debug_mode_check(None)) == 1
|
||||
|
||||
|
||||
class TestSettingsChecksAgainstDefaults(DirectoriesMixin, TestCase):
|
||||
class TestSettingsChecksAgainstDefaults:
|
||||
def test_all_valid(self) -> None:
|
||||
"""
|
||||
GIVEN:
|
||||
@@ -78,104 +107,71 @@ class TestSettingsChecksAgainstDefaults(DirectoriesMixin, TestCase):
|
||||
- No system check errors reported
|
||||
"""
|
||||
msgs = settings_values_check(None)
|
||||
self.assertEqual(len(msgs), 0)
|
||||
assert len(msgs) == 0
|
||||
|
||||
|
||||
class TestOcrSettingsChecks(DirectoriesMixin, TestCase):
    @override_settings(OCR_OUTPUT_TYPE="notapdf")
    def test_invalid_output_type(self) -> None:
        """
        GIVEN:
            - Default settings
            - OCR output type is invalid
        WHEN:
            - Settings are validated
        THEN:
            - system check error reported for OCR output type
        """
        msgs = settings_values_check(None)
        self.assertEqual(len(msgs), 1)

        msg = msgs[0]

        self.assertIn('OCR output type "notapdf"', msg.msg)

    @override_settings(OCR_MODE="makeitso")
    def test_invalid_ocr_type(self) -> None:
        """
        GIVEN:
            - Default settings
            - OCR type is invalid
        WHEN:
            - Settings are validated
        THEN:
            - system check error reported for OCR type
        """
        msgs = settings_values_check(None)
        self.assertEqual(len(msgs), 1)

        msg = msgs[0]

        self.assertIn('OCR output mode "makeitso"', msg.msg)

    @override_settings(OCR_MODE="skip_noarchive")
    def test_deprecated_ocr_type(self) -> None:
        """
        GIVEN:
            - Default settings
            - OCR type is deprecated
        WHEN:
            - Settings are validated
        THEN:
            - deprecation warning reported for OCR type
        """
        msgs = settings_values_check(None)
        self.assertEqual(len(msgs), 1)

        msg = msgs[0]

        self.assertIn("deprecated", msg.msg)

    @override_settings(OCR_SKIP_ARCHIVE_FILE="invalid")
    def test_invalid_ocr_skip_archive_file(self) -> None:
        """
        GIVEN:
            - Default settings
            - OCR_SKIP_ARCHIVE_FILE is invalid
        WHEN:
            - Settings are validated
        THEN:
            - system check error reported for OCR_SKIP_ARCHIVE_FILE
        """
        msgs = settings_values_check(None)
        self.assertEqual(len(msgs), 1)

        msg = msgs[0]

        self.assertIn('OCR_SKIP_ARCHIVE_FILE setting "invalid"', msg.msg)

    @override_settings(OCR_CLEAN="cleanme")
    def test_invalid_ocr_clean(self) -> None:
        """
        GIVEN:
            - Default settings
            - OCR cleaning type is invalid
        WHEN:
            - Settings are validated
        THEN:
            - system check error reported for OCR cleaning type
        """
        msgs = settings_values_check(None)
        self.assertEqual(len(msgs), 1)

        msg = msgs[0]

        self.assertIn('OCR clean mode "cleanme"', msg.msg)

class TestOcrSettingsChecks:
    @pytest.mark.parametrize(
        ("setting", "value", "expected_msg"),
        [
            pytest.param(
                "OCR_OUTPUT_TYPE",
                "notapdf",
                'OCR output type "notapdf"',
                id="invalid-output-type",
            ),
            pytest.param(
                "OCR_MODE",
                "makeitso",
                'OCR output mode "makeitso"',
                id="invalid-mode",
            ),
            pytest.param(
                "OCR_MODE",
                "skip_noarchive",
                "deprecated",
                id="deprecated-mode",
            ),
            pytest.param(
                "OCR_SKIP_ARCHIVE_FILE",
                "invalid",
                'OCR_SKIP_ARCHIVE_FILE setting "invalid"',
                id="invalid-skip-archive-file",
            ),
            pytest.param(
                "OCR_CLEAN",
                "cleanme",
                'OCR clean mode "cleanme"',
                id="invalid-clean",
            ),
        ],
    )
    def test_invalid_setting_produces_one_error(
        self,
        settings: SettingsWrapper,
        setting: str,
        value: str,
        expected_msg: str,
    ) -> None:
        """
        GIVEN:
            - Default settings
            - One OCR setting is set to an invalid value
        WHEN:
            - Settings are validated
        THEN:
            - Exactly one system check error is reported containing the expected message
        """
        setattr(settings, setting, value)

        msgs = settings_values_check(None)
        assert len(msgs) == 1
        assert expected_msg in msgs[0].msg


class TestTimezoneSettingsChecks(DirectoriesMixin, TestCase):
    @override_settings(TIME_ZONE="TheMoon\\MyCrater")
    def test_invalid_timezone(self) -> None:
class TestTimezoneSettingsChecks:
    def test_invalid_timezone(self, settings: SettingsWrapper) -> None:
        """
        GIVEN:
            - Default settings
@@ -185,17 +181,16 @@ class TestTimezoneSettingsChecks(DirectoriesMixin, TestCase):
        THEN:
            - system check error reported for timezone
        """
        settings.TIME_ZONE = "TheMoon\\MyCrater"

        msgs = settings_values_check(None)
        self.assertEqual(len(msgs), 1)

        msg = msgs[0]

        self.assertIn('Timezone "TheMoon\\MyCrater"', msg.msg)
        assert len(msgs) == 1
        assert 'Timezone "TheMoon\\MyCrater"' in msgs[0].msg


class TestEmailCertSettingsChecks(DirectoriesMixin, FileSystemAssertsMixin, TestCase):
    @override_settings(EMAIL_CERTIFICATE_FILE=Path("/tmp/not_actually_here.pem"))
    def test_not_valid_file(self) -> None:
class TestEmailCertSettingsChecks:
    def test_not_valid_file(self, settings: SettingsWrapper) -> None:
        """
        GIVEN:
            - Default settings
@@ -205,19 +200,22 @@ class TestEmailCertSettingsChecks(DirectoriesMixin, FileSystemAssertsMixin, Test
        THEN:
            - system check error reported for email certificate
        """
        self.assertIsNotFile("/tmp/not_actually_here.pem")
        cert_path = Path("/tmp/not_actually_here.pem")
        assert not cert_path.is_file()
        settings.EMAIL_CERTIFICATE_FILE = cert_path

        msgs = settings_values_check(None)

        self.assertEqual(len(msgs), 1)

        msg = msgs[0]

        self.assertIn("Email cert /tmp/not_actually_here.pem is not a file", msg.msg)
        assert len(msgs) == 1
        assert "Email cert /tmp/not_actually_here.pem is not a file" in msgs[0].msg


class TestAuditLogChecks(TestCase):
    def test_was_enabled_once(self) -> None:
class TestAuditLogChecks:
    def test_was_enabled_once(
        self,
        settings: SettingsWrapper,
        mocker: MockerFixture,
    ) -> None:
        """
        GIVEN:
            - Audit log is not enabled
@@ -226,23 +224,18 @@ class TestAuditLogChecks(TestCase):
        THEN:
            - system check error reported for disabling audit log
        """
        introspect_mock = mock.MagicMock()
        settings.AUDIT_LOG_ENABLED = False
        introspect_mock = mocker.MagicMock()
        introspect_mock.introspection.table_names.return_value = ["auditlog_logentry"]
        with override_settings(AUDIT_LOG_ENABLED=False):
            with mock.patch.dict(
                "paperless.checks.connections",
                {"default": introspect_mock},
            ):
                msgs = audit_log_check(None)
        mocker.patch.dict(
            "paperless.checks.connections",
            {"default": introspect_mock},
        )

        self.assertEqual(len(msgs), 1)
        msgs = audit_log_check(None)

        msg = msgs[0]

        self.assertIn(
            ("auditlog table was found but audit log is disabled."),
            msg.msg,
        )
        assert len(msgs) == 1
        assert "auditlog table was found but audit log is disabled." in msgs[0].msg


DEPRECATED_VARS: dict[str, str] = {
@@ -271,20 +264,16 @@ class TestDeprecatedDbSettings:
    @pytest.mark.parametrize(
        ("env_var", "db_option_key"),
        [
            ("PAPERLESS_DB_TIMEOUT", "timeout"),
            ("PAPERLESS_DB_POOLSIZE", "pool.min_size / pool.max_size"),
            ("PAPERLESS_DBSSLMODE", "sslmode"),
            ("PAPERLESS_DBSSLROOTCERT", "sslrootcert"),
            ("PAPERLESS_DBSSLCERT", "sslcert"),
            ("PAPERLESS_DBSSLKEY", "sslkey"),
        ],
        ids=[
            "db-timeout",
            "db-poolsize",
            "ssl-mode",
            "ssl-rootcert",
            "ssl-cert",
            "ssl-key",
            pytest.param("PAPERLESS_DB_TIMEOUT", "timeout", id="db-timeout"),
            pytest.param(
                "PAPERLESS_DB_POOLSIZE",
                "pool.min_size / pool.max_size",
                id="db-poolsize",
            ),
            pytest.param("PAPERLESS_DBSSLMODE", "sslmode", id="ssl-mode"),
            pytest.param("PAPERLESS_DBSSLROOTCERT", "sslrootcert", id="ssl-rootcert"),
            pytest.param("PAPERLESS_DBSSLCERT", "sslcert", id="ssl-cert"),
            pytest.param("PAPERLESS_DBSSLKEY", "sslkey", id="ssl-key"),
        ],
    )
    def test_single_deprecated_var_produces_one_warning(
@@ -403,7 +392,10 @@ class TestV3MinimumUpgradeVersionCheck:
    """Test suite for check_v3_minimum_upgrade_version system check."""

    @pytest.fixture
    def build_conn_mock(self, mocker: MockerFixture):
    def build_conn_mock(
        self,
        mocker: MockerFixture,
    ) -> Callable[[list[str], list[str]], mock.MagicMock]:
        """Factory fixture that builds a connections['default'] mock.

        Usage::
@@ -423,7 +415,7 @@ class TestV3MinimumUpgradeVersionCheck:
    def test_no_migrations_table_fresh_install(
        self,
        mocker: MockerFixture,
        build_conn_mock,
        build_conn_mock: Callable[[list[str], list[str]], mock.MagicMock],
    ) -> None:
        """
        GIVEN:
@@ -442,7 +434,7 @@ class TestV3MinimumUpgradeVersionCheck:
    def test_no_documents_migrations_fresh_install(
        self,
        mocker: MockerFixture,
        build_conn_mock,
        build_conn_mock: Callable[[list[str], list[str]], mock.MagicMock],
    ) -> None:
        """
        GIVEN:
@@ -461,7 +453,7 @@ class TestV3MinimumUpgradeVersionCheck:
    def test_v3_state_with_0001_squashed(
        self,
        mocker: MockerFixture,
        build_conn_mock,
        build_conn_mock: Callable[[list[str], list[str]], mock.MagicMock],
    ) -> None:
        """
        GIVEN:
@@ -485,7 +477,7 @@ class TestV3MinimumUpgradeVersionCheck:
    def test_v3_state_with_0002_squashed_only(
        self,
        mocker: MockerFixture,
        build_conn_mock,
        build_conn_mock: Callable[[list[str], list[str]], mock.MagicMock],
    ) -> None:
        """
        GIVEN:
@@ -504,7 +496,7 @@ class TestV3MinimumUpgradeVersionCheck:
    def test_v2_20_9_state_ready_to_upgrade(
        self,
        mocker: MockerFixture,
        build_conn_mock,
        build_conn_mock: Callable[[list[str], list[str]], mock.MagicMock],
    ) -> None:
        """
        GIVEN:
@@ -531,7 +523,7 @@ class TestV3MinimumUpgradeVersionCheck:
    def test_v2_20_8_raises_error(
        self,
        mocker: MockerFixture,
        build_conn_mock,
        build_conn_mock: Callable[[list[str], list[str]], mock.MagicMock],
    ) -> None:
        """
        GIVEN:
@@ -558,7 +550,7 @@ class TestV3MinimumUpgradeVersionCheck:
    def test_very_old_version_raises_error(
        self,
        mocker: MockerFixture,
        build_conn_mock,
        build_conn_mock: Callable[[list[str], list[str]], mock.MagicMock],
    ) -> None:
        """
        GIVEN:
@@ -585,7 +577,7 @@ class TestV3MinimumUpgradeVersionCheck:
    def test_error_hint_mentions_v2_20_9(
        self,
        mocker: MockerFixture,
        build_conn_mock,
        build_conn_mock: Callable[[list[str], list[str]], mock.MagicMock],
    ) -> None:
        """
        GIVEN:

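The converted checks tests above assume a module-level `directories` fixture yielding a `PaperlessTestDirs` value, plus pytest-django's `settings` fixture (a `SettingsWrapper`), in place of `DirectoriesMixin` and `override_settings`. A minimal sketch of what such a fixture could look like — `PaperlessTestDirs` is assumed here to be a simple NamedTuple of the three paths, matching the attribute access in the tests; its real definition is not shown in this diff:

    import pytest
    from pathlib import Path
    from typing import NamedTuple

    from pytest_django.fixtures import SettingsWrapper


    class PaperlessTestDirs(NamedTuple):
        # Assumed shape, inferred from directories.data_dir etc. above.
        data_dir: Path
        media_dir: Path
        consumption_dir: Path


    @pytest.fixture
    def directories(tmp_path: Path, settings: SettingsWrapper) -> PaperlessTestDirs:
        # Create the three directories under pytest's per-test tmp_path and
        # point the Django settings at them; SettingsWrapper restores the
        # original values automatically when the test finishes.
        data_dir = tmp_path / "data"
        media_dir = tmp_path / "media"
        consumption_dir = tmp_path / "consume"
        for directory in (data_dir, media_dir, consumption_dir):
            directory.mkdir()

        settings.DATA_DIR = data_dir
        settings.MEDIA_ROOT = media_dir
        settings.CONSUMPTION_DIR = consumption_dir

        return PaperlessTestDirs(
            data_dir=data_dir,
            media_dir=media_dir,
            consumption_dir=consumption_dir,
        )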
@@ -1,482 +0,0 @@
import datetime
import os
from unittest import TestCase
from unittest import mock

import pytest
from celery.schedules import crontab

from paperless.settings import _parse_base_paths
from paperless.settings import _parse_beat_schedule
from paperless.settings import _parse_dateparser_languages
from paperless.settings import _parse_ignore_dates
from paperless.settings import _parse_paperless_url
from paperless.settings import _parse_redis_url
from paperless.settings import default_threads_per_worker


class TestIgnoreDateParsing(TestCase):
    """
    Tests the parsing of the PAPERLESS_IGNORE_DATES setting value
    """

    def _parse_checker(self, test_cases) -> None:
        """
        Helper function to check ignore date parsing

        Args:
            test_cases (_type_): _description_
        """
        for env_str, date_format, expected_date_set in test_cases:
            self.assertSetEqual(
                _parse_ignore_dates(env_str, date_format),
                expected_date_set,
            )

    def test_no_ignore_dates_set(self) -> None:
        """
        GIVEN:
            - No ignore dates are set
        THEN:
            - No ignore dates are parsed
        """
        self.assertSetEqual(_parse_ignore_dates(""), set())

    def test_single_ignore_dates_set(self) -> None:
        """
        GIVEN:
            - Ignore dates are set per certain inputs
        THEN:
            - All ignore dates are parsed
        """
        test_cases = [
            ("1985-05-01", "YMD", {datetime.date(1985, 5, 1)}),
            (
                "1985-05-01,1991-12-05",
                "YMD",
                {datetime.date(1985, 5, 1), datetime.date(1991, 12, 5)},
            ),
            ("2010-12-13", "YMD", {datetime.date(2010, 12, 13)}),
            ("11.01.10", "DMY", {datetime.date(2010, 1, 11)}),
            (
                "11.01.2001,15-06-1996",
                "DMY",
                {datetime.date(2001, 1, 11), datetime.date(1996, 6, 15)},
            ),
        ]

        self._parse_checker(test_cases)

class TestThreadCalculation(TestCase):
    def test_workers_threads(self) -> None:
        """
        GIVEN:
            - Certain CPU counts
        WHEN:
            - Threads per worker is calculated
        THEN:
            - Threads per worker less than or equal to CPU count
            - At least 1 thread per worker
        """
        default_workers = 1

        for i in range(1, 64):
            with mock.patch(
                "paperless.settings.multiprocessing.cpu_count",
            ) as cpu_count:
                cpu_count.return_value = i

                default_threads = default_threads_per_worker(default_workers)

                self.assertGreaterEqual(default_threads, 1)

                self.assertLessEqual(default_workers * default_threads, i)


class TestRedisSocketConversion(TestCase):
    def test_redis_socket_parsing(self) -> None:
        """
        GIVEN:
            - Various Redis connection URI formats
        WHEN:
            - The URI is parsed
        THEN:
            - Socket based URIs are translated
            - Non-socket URIs are unchanged
            - None provided uses default
        """

        for input, expected in [
            # Nothing is set
            (None, ("redis://localhost:6379", "redis://localhost:6379")),
            # celery style
            (
                "redis+socket:///run/redis/redis.sock",
                (
                    "redis+socket:///run/redis/redis.sock",
                    "unix:///run/redis/redis.sock",
                ),
            ),
            # redis-py / channels-redis style
            (
                "unix:///run/redis/redis.sock",
                (
                    "redis+socket:///run/redis/redis.sock",
                    "unix:///run/redis/redis.sock",
                ),
            ),
            # celery style with db
            (
                "redis+socket:///run/redis/redis.sock?virtual_host=5",
                (
                    "redis+socket:///run/redis/redis.sock?virtual_host=5",
                    "unix:///run/redis/redis.sock?db=5",
                ),
            ),
            # redis-py / channels-redis style with db
            (
                "unix:///run/redis/redis.sock?db=10",
                (
                    "redis+socket:///run/redis/redis.sock?virtual_host=10",
                    "unix:///run/redis/redis.sock?db=10",
                ),
            ),
            # Just a host with a port
            (
                "redis://myredishost:6379",
                ("redis://myredishost:6379", "redis://myredishost:6379"),
            ),
        ]:
            result = _parse_redis_url(input)
            self.assertTupleEqual(expected, result)

class TestCeleryScheduleParsing(TestCase):
    MAIL_EXPIRE_TIME = 9.0 * 60.0
    CLASSIFIER_EXPIRE_TIME = 59.0 * 60.0
    INDEX_EXPIRE_TIME = 23.0 * 60.0 * 60.0
    SANITY_EXPIRE_TIME = ((7.0 * 24.0) - 1.0) * 60.0 * 60.0
    EMPTY_TRASH_EXPIRE_TIME = 23.0 * 60.0 * 60.0
    RUN_SCHEDULED_WORKFLOWS_EXPIRE_TIME = 59.0 * 60.0
    LLM_INDEX_EXPIRE_TIME = 23.0 * 60.0 * 60.0
    CLEANUP_EXPIRED_SHARE_BUNDLES_EXPIRE_TIME = 23.0 * 60.0 * 60.0

    def test_schedule_configuration_default(self) -> None:
        """
        GIVEN:
            - No configured task schedules
        WHEN:
            - The celery beat schedule is built
        THEN:
            - The default schedule is returned
        """
        schedule = _parse_beat_schedule()

        self.assertDictEqual(
            {
                "Check all e-mail accounts": {
                    "task": "paperless_mail.tasks.process_mail_accounts",
                    "schedule": crontab(minute="*/10"),
                    "options": {"expires": self.MAIL_EXPIRE_TIME},
                },
                "Train the classifier": {
                    "task": "documents.tasks.train_classifier",
                    "schedule": crontab(minute="5", hour="*/1"),
                    "options": {"expires": self.CLASSIFIER_EXPIRE_TIME},
                },
                "Optimize the index": {
                    "task": "documents.tasks.index_optimize",
                    "schedule": crontab(minute=0, hour=0),
                    "options": {"expires": self.INDEX_EXPIRE_TIME},
                },
                "Perform sanity check": {
                    "task": "documents.tasks.sanity_check",
                    "schedule": crontab(minute=30, hour=0, day_of_week="sun"),
                    "options": {"expires": self.SANITY_EXPIRE_TIME},
                },
                "Empty trash": {
                    "task": "documents.tasks.empty_trash",
                    "schedule": crontab(minute=0, hour="1"),
                    "options": {"expires": self.EMPTY_TRASH_EXPIRE_TIME},
                },
                "Check and run scheduled workflows": {
                    "task": "documents.tasks.check_scheduled_workflows",
                    "schedule": crontab(minute="5", hour="*/1"),
                    "options": {"expires": self.RUN_SCHEDULED_WORKFLOWS_EXPIRE_TIME},
                },
                "Rebuild LLM index": {
                    "task": "documents.tasks.llmindex_index",
                    "schedule": crontab(minute=10, hour=2),
                    "options": {
                        "expires": self.LLM_INDEX_EXPIRE_TIME,
                    },
                },
                "Cleanup expired share link bundles": {
                    "task": "documents.tasks.cleanup_expired_share_link_bundles",
                    "schedule": crontab(minute=0, hour=2),
                    "options": {
                        "expires": self.CLEANUP_EXPIRED_SHARE_BUNDLES_EXPIRE_TIME,
                    },
                },
            },
            schedule,
        )

    def test_schedule_configuration_changed(self) -> None:
        """
        GIVEN:
            - Email task is configured non-default
        WHEN:
            - The celery beat schedule is built
        THEN:
            - The email task is configured per environment
            - The default schedule is returned for other tasks
        """
        with mock.patch.dict(
            os.environ,
            {"PAPERLESS_EMAIL_TASK_CRON": "*/50 * * * mon"},
        ):
            schedule = _parse_beat_schedule()

        self.assertDictEqual(
            {
                "Check all e-mail accounts": {
                    "task": "paperless_mail.tasks.process_mail_accounts",
                    "schedule": crontab(minute="*/50", day_of_week="mon"),
                    "options": {"expires": self.MAIL_EXPIRE_TIME},
                },
                "Train the classifier": {
                    "task": "documents.tasks.train_classifier",
                    "schedule": crontab(minute="5", hour="*/1"),
                    "options": {"expires": self.CLASSIFIER_EXPIRE_TIME},
                },
                "Optimize the index": {
                    "task": "documents.tasks.index_optimize",
                    "schedule": crontab(minute=0, hour=0),
                    "options": {"expires": self.INDEX_EXPIRE_TIME},
                },
                "Perform sanity check": {
                    "task": "documents.tasks.sanity_check",
                    "schedule": crontab(minute=30, hour=0, day_of_week="sun"),
                    "options": {"expires": self.SANITY_EXPIRE_TIME},
                },
                "Empty trash": {
                    "task": "documents.tasks.empty_trash",
                    "schedule": crontab(minute=0, hour="1"),
                    "options": {"expires": self.EMPTY_TRASH_EXPIRE_TIME},
                },
                "Check and run scheduled workflows": {
                    "task": "documents.tasks.check_scheduled_workflows",
                    "schedule": crontab(minute="5", hour="*/1"),
                    "options": {"expires": self.RUN_SCHEDULED_WORKFLOWS_EXPIRE_TIME},
                },
                "Rebuild LLM index": {
                    "task": "documents.tasks.llmindex_index",
                    "schedule": crontab(minute=10, hour=2),
                    "options": {
                        "expires": self.LLM_INDEX_EXPIRE_TIME,
                    },
                },
                "Cleanup expired share link bundles": {
                    "task": "documents.tasks.cleanup_expired_share_link_bundles",
                    "schedule": crontab(minute=0, hour=2),
                    "options": {
                        "expires": self.CLEANUP_EXPIRED_SHARE_BUNDLES_EXPIRE_TIME,
                    },
                },
            },
            schedule,
        )

    def test_schedule_configuration_disabled(self) -> None:
        """
        GIVEN:
            - Search index task is disabled
        WHEN:
            - The celery beat schedule is built
        THEN:
            - The search index task is not present
            - The default schedule is returned for other tasks
        """
        with mock.patch.dict(os.environ, {"PAPERLESS_INDEX_TASK_CRON": "disable"}):
            schedule = _parse_beat_schedule()

        self.assertDictEqual(
            {
                "Check all e-mail accounts": {
                    "task": "paperless_mail.tasks.process_mail_accounts",
                    "schedule": crontab(minute="*/10"),
                    "options": {"expires": self.MAIL_EXPIRE_TIME},
                },
                "Train the classifier": {
                    "task": "documents.tasks.train_classifier",
                    "schedule": crontab(minute="5", hour="*/1"),
                    "options": {"expires": self.CLASSIFIER_EXPIRE_TIME},
                },
                "Perform sanity check": {
                    "task": "documents.tasks.sanity_check",
                    "schedule": crontab(minute=30, hour=0, day_of_week="sun"),
                    "options": {"expires": self.SANITY_EXPIRE_TIME},
                },
                "Empty trash": {
                    "task": "documents.tasks.empty_trash",
                    "schedule": crontab(minute=0, hour="1"),
                    "options": {"expires": self.EMPTY_TRASH_EXPIRE_TIME},
                },
                "Check and run scheduled workflows": {
                    "task": "documents.tasks.check_scheduled_workflows",
                    "schedule": crontab(minute="5", hour="*/1"),
                    "options": {"expires": self.RUN_SCHEDULED_WORKFLOWS_EXPIRE_TIME},
                },
                "Rebuild LLM index": {
                    "task": "documents.tasks.llmindex_index",
                    "schedule": crontab(minute=10, hour=2),
                    "options": {
                        "expires": self.LLM_INDEX_EXPIRE_TIME,
                    },
                },
                "Cleanup expired share link bundles": {
                    "task": "documents.tasks.cleanup_expired_share_link_bundles",
                    "schedule": crontab(minute=0, hour=2),
                    "options": {
                        "expires": self.CLEANUP_EXPIRED_SHARE_BUNDLES_EXPIRE_TIME,
                    },
                },
            },
            schedule,
        )

    def test_schedule_configuration_disabled_all(self) -> None:
        """
        GIVEN:
            - All tasks are disabled
        WHEN:
            - The celery beat schedule is built
        THEN:
            - No tasks are scheduled
        """
        with mock.patch.dict(
            os.environ,
            {
                "PAPERLESS_EMAIL_TASK_CRON": "disable",
                "PAPERLESS_TRAIN_TASK_CRON": "disable",
                "PAPERLESS_SANITY_TASK_CRON": "disable",
                "PAPERLESS_INDEX_TASK_CRON": "disable",
                "PAPERLESS_EMPTY_TRASH_TASK_CRON": "disable",
                "PAPERLESS_WORKFLOW_SCHEDULED_TASK_CRON": "disable",
                "PAPERLESS_LLM_INDEX_TASK_CRON": "disable",
                "PAPERLESS_SHARE_LINK_BUNDLE_CLEANUP_CRON": "disable",
            },
        ):
            schedule = _parse_beat_schedule()

        self.assertDictEqual(
            {},
            schedule,
        )

class TestPaperlessURLSettings(TestCase):
    def test_paperless_url(self) -> None:
        """
        GIVEN:
            - PAPERLESS_URL is set
        WHEN:
            - The URL is parsed
        THEN:
            - The URL is returned and present in related settings
        """
        with mock.patch.dict(
            os.environ,
            {
                "PAPERLESS_URL": "https://example.com",
            },
        ):
            url = _parse_paperless_url()
            self.assertEqual("https://example.com", url)
            from django.conf import settings

            self.assertIn(url, settings.CSRF_TRUSTED_ORIGINS)
            self.assertIn(url, settings.CORS_ALLOWED_ORIGINS)


class TestPathSettings(TestCase):
    def test_default_paths(self) -> None:
        """
        GIVEN:
            - PAPERLESS_FORCE_SCRIPT_NAME is not set
        WHEN:
            - Settings are parsed
        THEN:
            - Paths are as expected
        """
        base_paths = _parse_base_paths()
        self.assertEqual(None, base_paths[0])  # FORCE_SCRIPT_NAME
        self.assertEqual("/", base_paths[1])  # BASE_URL
        self.assertEqual("/accounts/login/", base_paths[2])  # LOGIN_URL
        self.assertEqual("/dashboard", base_paths[3])  # LOGIN_REDIRECT_URL
        self.assertEqual(
            "/accounts/login/?loggedout=1",
            base_paths[4],
        )  # LOGOUT_REDIRECT_URL

    @mock.patch("os.environ", {"PAPERLESS_FORCE_SCRIPT_NAME": "/paperless"})
    def test_subpath(self) -> None:
        """
        GIVEN:
            - PAPERLESS_FORCE_SCRIPT_NAME is set
        WHEN:
            - Settings are parsed
        THEN:
            - The path is returned and present in related settings
        """
        base_paths = _parse_base_paths()
        self.assertEqual("/paperless", base_paths[0])  # FORCE_SCRIPT_NAME
        self.assertEqual("/paperless/", base_paths[1])  # BASE_URL
        self.assertEqual("/paperless/accounts/login/", base_paths[2])  # LOGIN_URL
        self.assertEqual("/paperless/dashboard", base_paths[3])  # LOGIN_REDIRECT_URL
        self.assertEqual(
            "/paperless/accounts/login/?loggedout=1",
            base_paths[4],
        )  # LOGOUT_REDIRECT_URL

    @mock.patch(
        "os.environ",
        {
            "PAPERLESS_FORCE_SCRIPT_NAME": "/paperless",
            "PAPERLESS_LOGOUT_REDIRECT_URL": "/foobar/",
        },
    )
    def test_subpath_with_explicit_logout_url(self) -> None:
        """
        GIVEN:
            - PAPERLESS_FORCE_SCRIPT_NAME is set and so is PAPERLESS_LOGOUT_REDIRECT_URL
        WHEN:
            - Settings are parsed
        THEN:
            - The correct logout redirect URL is returned
        """
        base_paths = _parse_base_paths()
        self.assertEqual("/paperless/", base_paths[1])  # BASE_URL
        self.assertEqual("/foobar/", base_paths[4])  # LOGOUT_REDIRECT_URL


@pytest.mark.parametrize(
    ("languages", "expected"),
    [
        ("de", ["de"]),
        ("zh", ["zh"]),
        ("fr+en", ["fr", "en"]),
        # Locales must be supported
        ("en-001+fr-CA", ["en-001", "fr-CA"]),
        ("en-001+fr", ["en-001", "fr"]),
        # Special case for Chinese: variants seem to miss some dates,
        # so we always add "zh" as a fallback.
        ("en+zh-Hans-HK", ["en", "zh-Hans-HK", "zh"]),
        ("en+zh-Hans", ["en", "zh-Hans", "zh"]),
        ("en+zh-Hans+zh-Hant", ["en", "zh-Hans", "zh-Hant", "zh"]),
    ],
)
def test_parser_date_parser_languages(languages, expected) -> None:
    assert sorted(_parse_dateparser_languages(languages)) == sorted(expected)

@@ -9,35 +9,50 @@ from paperless.utils import ocr_to_dateparser_languages
@pytest.mark.parametrize(
    ("ocr_language", "expected"),
    [
        # One language
        ("eng", ["en"]),
        # Multiple languages
        ("fra+ita+lao", ["fr", "it", "lo"]),
        # Languages that don't have a two-letter equivalent
        ("fil", ["fil"]),
        # Languages with a script part supported by dateparser
        ("aze_cyrl+srp_latn", ["az-Cyrl", "sr-Latn"]),
        # Languages with a script part not supported by dateparser
        # In this case, default to the language without script
        ("deu_frak", ["de"]),
        # Traditional and simplified chinese don't have the same name in dateparser,
        # so they're converted to the general chinese language
        ("chi_tra+chi_sim", ["zh"]),
        # If a language is not supported by dateparser, fallback to the supported ones
        ("eng+unsupported_language+por", ["en", "pt"]),
        # If no language is supported, fallback to default
        ("unsupported1+unsupported2", []),
        # Duplicate languages, should not duplicate in result
        ("eng+eng", ["en"]),
        # Language with script, but script is not mapped
        ("ita_unknownscript", ["it"]),
        pytest.param("eng", ["en"], id="single-language"),
        pytest.param("fra+ita+lao", ["fr", "it", "lo"], id="multiple-languages"),
        pytest.param("fil", ["fil"], id="no-two-letter-equivalent"),
        pytest.param(
            "aze_cyrl+srp_latn",
            ["az-Cyrl", "sr-Latn"],
            id="script-supported-by-dateparser",
        ),
        pytest.param(
            "deu_frak",
            ["de"],
            id="script-not-supported-falls-back-to-language",
        ),
        pytest.param(
            "chi_tra+chi_sim",
            ["zh"],
            id="chinese-variants-collapse-to-general",
        ),
        pytest.param(
            "eng+unsupported_language+por",
            ["en", "pt"],
            id="unsupported-language-skipped",
        ),
        pytest.param(
            "unsupported1+unsupported2",
            [],
            id="all-unsupported-returns-empty",
        ),
        pytest.param("eng+eng", ["en"], id="duplicates-deduplicated"),
        pytest.param(
            "ita_unknownscript",
            ["it"],
            id="unknown-script-falls-back-to-language",
        ),
    ],
)
def test_ocr_to_dateparser_languages(ocr_language, expected):
def test_ocr_to_dateparser_languages(ocr_language: str, expected: list[str]) -> None:
    assert sorted(ocr_to_dateparser_languages(ocr_language)) == sorted(expected)


def test_ocr_to_dateparser_languages_exception(monkeypatch, caplog):
def test_ocr_to_dateparser_languages_exception(
    monkeypatch: pytest.MonkeyPatch,
    caplog: pytest.LogCaptureFixture,
) -> None:
    # Patch LocaleDataLoader.get_locale_map to raise an exception
    class DummyLoader:
        def get_locale_map(self, locales=None):

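The same conversion pattern recurs throughout these parametrized tests: a bare value list plus a separate ids= list becomes pytest.param(..., id=...), which keeps each case's ID next to its data and cannot drift out of sync when cases are reordered. A toy illustration of the mechanics (values invented for this example, not from the repo):

    import pytest


    @pytest.mark.parametrize(
        ("raw", "expected"),
        [
            # the id appears in the report as test_first_code[single-language]
            pytest.param("eng", "en", id="single-language"),
            pytest.param("eng+eng", "en", id="duplicates-still-one-code"),
        ],
    )
    def test_first_code(raw: str, expected: str) -> None:
        # take the first "+"-separated token and truncate it to two letters
        assert raw.split("+")[0][:2] == expected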
@@ -1,24 +1,31 @@
import tempfile
from pathlib import Path

from django.test import override_settings
from django.test import Client
from pytest_django.fixtures import SettingsWrapper


def test_favicon_view(client):
    with tempfile.TemporaryDirectory() as tmpdir:
        static_dir = Path(tmpdir)
        favicon_path = static_dir / "paperless" / "img" / "favicon.ico"
        favicon_path.parent.mkdir(parents=True, exist_ok=True)
        favicon_path.write_bytes(b"FAKE ICON DATA")
def test_favicon_view(
    client: Client,
    tmp_path: Path,
    settings: SettingsWrapper,
) -> None:
    favicon_path = tmp_path / "paperless" / "img" / "favicon.ico"
    favicon_path.parent.mkdir(parents=True)
    favicon_path.write_bytes(b"FAKE ICON DATA")

        with override_settings(STATIC_ROOT=static_dir):
            response = client.get("/favicon.ico")
            assert response.status_code == 200
            assert response["Content-Type"] == "image/x-icon"
            assert b"".join(response.streaming_content) == b"FAKE ICON DATA"
    settings.STATIC_ROOT = tmp_path

    response = client.get("/favicon.ico")
    assert response.status_code == 200
    assert response["Content-Type"] == "image/x-icon"
    assert b"".join(response.streaming_content) == b"FAKE ICON DATA"


def test_favicon_view_missing_file(client):
    with override_settings(STATIC_ROOT=Path(tempfile.mkdtemp())):
        response = client.get("/favicon.ico")
        assert response.status_code == 404
def test_favicon_view_missing_file(
    client: Client,
    tmp_path: Path,
    settings: SettingsWrapper,
) -> None:
    settings.STATIC_ROOT = tmp_path
    response = client.get("/favicon.ico")
    assert response.status_code == 404

@@ -1,4 +1,4 @@
from llama_index.core.bridge.pydantic import BaseModel
from pydantic import BaseModel


class DocumentClassifierSchema(BaseModel):

@@ -1,10 +1,6 @@
import logging
import sys

from llama_index.core import VectorStoreIndex
from llama_index.core.prompts import PromptTemplate
from llama_index.core.query_engine import RetrieverQueryEngine

from documents.models import Document
from paperless_ai.client import AIClient
from paperless_ai.indexing import load_or_build_index
@@ -14,15 +10,13 @@ logger = logging.getLogger("paperless_ai.chat")
MAX_SINGLE_DOC_CONTEXT_CHARS = 15000
SINGLE_DOC_SNIPPET_CHARS = 800

CHAT_PROMPT_TMPL = PromptTemplate(
    template="""Context information is below.
CHAT_PROMPT_TMPL = """Context information is below.
---------------------
{context_str}
---------------------
Given the context information and not prior knowledge, answer the query.
Query: {query_str}
Answer:""",
)
Answer:"""


def stream_chat_with_documents(query_str: str, documents: list[Document]):
@@ -43,6 +37,10 @@ def stream_chat_with_documents(query_str: str, documents: list[Document]):
        yield "Sorry, I couldn't find any content to answer your question."
        return

    from llama_index.core import VectorStoreIndex
    from llama_index.core.prompts import PromptTemplate
    from llama_index.core.query_engine import RetrieverQueryEngine

    local_index = VectorStoreIndex(nodes=nodes)
    retriever = local_index.as_retriever(
        similarity_top_k=3 if len(documents) == 1 else 5,
@@ -85,7 +83,8 @@ def stream_chat_with_documents(query_str: str, documents: list[Document]):
        for node in top_nodes
    )

    prompt = CHAT_PROMPT_TMPL.partial_format(
    prompt_template = PromptTemplate(template=CHAT_PROMPT_TMPL)
    prompt = prompt_template.partial_format(
        context_str=context,
        query_str=query_str,
    ).format(llm=client.llm)

@@ -1,9 +1,10 @@
import logging
from typing import TYPE_CHECKING

from llama_index.core.llms import ChatMessage
from llama_index.core.program.function_program import get_function_tool
from llama_index.llms.ollama import Ollama
from llama_index.llms.openai import OpenAI
if TYPE_CHECKING:
    from llama_index.core.llms import ChatMessage
    from llama_index.llms.ollama import Ollama
    from llama_index.llms.openai import OpenAI

from paperless.config import AIConfig
from paperless_ai.base_model import DocumentClassifierSchema
@@ -20,14 +21,18 @@ class AIClient:
        self.settings = AIConfig()
        self.llm = self.get_llm()

    def get_llm(self) -> Ollama | OpenAI:
    def get_llm(self) -> "Ollama | OpenAI":
        if self.settings.llm_backend == "ollama":
            from llama_index.llms.ollama import Ollama

            return Ollama(
                model=self.settings.llm_model or "llama3.1",
                base_url=self.settings.llm_endpoint or "http://localhost:11434",
                request_timeout=120,
            )
        elif self.settings.llm_backend == "openai":
            from llama_index.llms.openai import OpenAI

            return OpenAI(
                model=self.settings.llm_model or "gpt-3.5-turbo",
                api_base=self.settings.llm_endpoint or None,
@@ -43,6 +48,9 @@ class AIClient:
            self.settings.llm_model,
        )

        from llama_index.core.llms import ChatMessage
        from llama_index.core.program.function_program import get_function_tool

        user_msg = ChatMessage(role="user", content=prompt)
        tool = get_function_tool(DocumentClassifierSchema)
        result = self.llm.chat_with_tools(
@@ -58,7 +66,7 @@ class AIClient:
        parsed = DocumentClassifierSchema(**tool_calls[0].tool_kwargs)
        return parsed.model_dump()

    def run_chat(self, messages: list[ChatMessage]) -> str:
    def run_chat(self, messages: list["ChatMessage"]) -> str:
        logger.debug(
            "Running chat query against %s with model %s",
            self.settings.llm_backend,

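The AIClient changes above are one instance of the pattern applied across the paperless_ai modules in this branch: llama_index imports move from module scope into the functions that use them, and a TYPE_CHECKING block plus quoted annotations keep the type hints without any import cost at startup. A minimal self-contained sketch of the pattern — the heavy dependency is simulated here with a standard-library stand-in:

    from typing import TYPE_CHECKING

    if TYPE_CHECKING:
        # Seen only by type checkers; never executed at runtime.
        from decimal import Decimal


    def precise_third() -> "Decimal":
        # The import cost is paid on first call, not at module import time;
        # the quoted return annotation avoids needing the name at runtime.
        from decimal import Decimal

        return Decimal(1) / Decimal(3)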
@@ -1,13 +1,12 @@
import json

from django.conf import settings
from llama_index.core.base.embeddings.base import BaseEmbedding
from llama_index.embeddings.huggingface import HuggingFaceEmbedding
from llama_index.embeddings.openai import OpenAIEmbedding
import json
from typing import TYPE_CHECKING

from django.conf import settings

if TYPE_CHECKING:
    from pathlib import Path

    from llama_index.core.base.embeddings.base import BaseEmbedding

from documents.models import Document
from documents.models import Note
@@ -15,17 +14,21 @@ from paperless.config import AIConfig
from paperless.models import LLMEmbeddingBackend


def get_embedding_model() -> BaseEmbedding:
def get_embedding_model() -> "BaseEmbedding":
    config = AIConfig()

    match config.llm_embedding_backend:
        case LLMEmbeddingBackend.OPENAI:
            from llama_index.embeddings.openai import OpenAIEmbedding

            return OpenAIEmbedding(
                model=config.llm_embedding_model or "text-embedding-3-small",
                api_key=config.llm_api_key,
                api_base=config.llm_endpoint or None,
            )
        case LLMEmbeddingBackend.HUGGINGFACE:
            from llama_index.embeddings.huggingface import HuggingFaceEmbedding

            return HuggingFaceEmbedding(
                model_name=config.llm_embedding_model
                or "sentence-transformers/all-MiniLM-L6-v2",

@@ -4,26 +4,12 @@ from collections.abc import Callable
from collections.abc import Iterable
from datetime import timedelta
from pathlib import Path
from typing import TYPE_CHECKING
from typing import TypeVar

import faiss
import llama_index.core.settings as llama_settings
from celery import states
from django.conf import settings
from django.utils import timezone
from llama_index.core import Document as LlamaDocument
from llama_index.core import StorageContext
from llama_index.core import VectorStoreIndex
from llama_index.core import load_index_from_storage
from llama_index.core.indices.prompt_helper import PromptHelper
from llama_index.core.node_parser import SimpleNodeParser
from llama_index.core.prompts import PromptTemplate
from llama_index.core.retrievers import VectorIndexRetriever
from llama_index.core.schema import BaseNode
from llama_index.core.storage.docstore import SimpleDocumentStore
from llama_index.core.storage.index_store import SimpleIndexStore
from llama_index.core.text_splitter import TokenTextSplitter
from llama_index.vector_stores.faiss import FaissVectorStore

from documents.models import Document
from documents.models import PaperlessTask
@@ -34,6 +20,10 @@ from paperless_ai.embedding import get_embedding_model
_T = TypeVar("_T")
IterWrapper = Callable[[Iterable[_T]], Iterable[_T]]

if TYPE_CHECKING:
    from llama_index.core import VectorStoreIndex
    from llama_index.core.schema import BaseNode


def _identity(iterable: Iterable[_T]) -> Iterable[_T]:
    return iterable
@@ -75,12 +65,23 @@ def get_or_create_storage_context(*, rebuild=False):
    settings.LLM_INDEX_DIR.mkdir(parents=True, exist_ok=True)

    if rebuild or not settings.LLM_INDEX_DIR.exists():
        import faiss
        from llama_index.core import StorageContext
        from llama_index.core.storage.docstore import SimpleDocumentStore
        from llama_index.core.storage.index_store import SimpleIndexStore
        from llama_index.vector_stores.faiss import FaissVectorStore

        embedding_dim = get_embedding_dim()
        faiss_index = faiss.IndexFlatL2(embedding_dim)
        vector_store = FaissVectorStore(faiss_index=faiss_index)
        docstore = SimpleDocumentStore()
        index_store = SimpleIndexStore()
    else:
        from llama_index.core import StorageContext
        from llama_index.core.storage.docstore import SimpleDocumentStore
        from llama_index.core.storage.index_store import SimpleIndexStore
        from llama_index.vector_stores.faiss import FaissVectorStore

        vector_store = FaissVectorStore.from_persist_dir(settings.LLM_INDEX_DIR)
        docstore = SimpleDocumentStore.from_persist_dir(settings.LLM_INDEX_DIR)
        index_store = SimpleIndexStore.from_persist_dir(settings.LLM_INDEX_DIR)
@@ -93,7 +94,7 @@ def get_or_create_storage_context(*, rebuild=False):
    )


def build_document_node(document: Document) -> list[BaseNode]:
def build_document_node(document: Document) -> list["BaseNode"]:
    """
    Given a Document, returns parsed Nodes ready for indexing.
    """
@@ -112,6 +113,9 @@ def build_document_node(document: Document) -> list[BaseNode]:
        "added": document.added.isoformat() if document.added else None,
        "modified": document.modified.isoformat(),
    }
    from llama_index.core import Document as LlamaDocument
    from llama_index.core.node_parser import SimpleNodeParser

    doc = LlamaDocument(text=text, metadata=metadata)
    parser = SimpleNodeParser()
    return parser.get_nodes_from_documents([doc])
@@ -122,6 +126,10 @@ def load_or_build_index(nodes=None):
    Load an existing VectorStoreIndex if present,
    or build a new one using provided nodes if storage is empty.
    """
    import llama_index.core.settings as llama_settings
    from llama_index.core import VectorStoreIndex
    from llama_index.core import load_index_from_storage

    embed_model = get_embedding_model()
    llama_settings.Settings.embed_model = embed_model
    storage_context = get_or_create_storage_context()
@@ -143,7 +151,7 @@
    )


def remove_document_docstore_nodes(document: Document, index: VectorStoreIndex):
def remove_document_docstore_nodes(document: Document, index: "VectorStoreIndex"):
    """
    Removes existing documents from docstore for a given document from the index.
    This is necessary because FAISS IndexFlatL2 is append-only.
@@ -174,6 +182,8 @@ def update_llm_index(
    """
    Rebuild or update the LLM index.
    """
    from llama_index.core import VectorStoreIndex

    nodes = []

    documents = Document.objects.all()
@@ -187,6 +197,8 @@
        (settings.LLM_INDEX_DIR / "meta.json").unlink(missing_ok=True)
        # Rebuild index from scratch
        logger.info("Rebuilding LLM index.")
        import llama_index.core.settings as llama_settings

        embed_model = get_embedding_model()
        llama_settings.Settings.embed_model = embed_model
        storage_context = get_or_create_storage_context(rebuild=True)
@@ -271,6 +283,10 @@ def llm_index_remove_document(document: Document):


def truncate_content(content: str) -> str:
    from llama_index.core.indices.prompt_helper import PromptHelper
    from llama_index.core.prompts import PromptTemplate
    from llama_index.core.text_splitter import TokenTextSplitter

    prompt_helper = PromptHelper(
        context_window=8192,
        num_output=512,
@@ -315,6 +331,8 @@ def query_similar_documents(
        else None
    )

    from llama_index.core.retrievers import VectorIndexRetriever

    retriever = VectorIndexRetriever(
        index=index,
        similarity_top_k=top_k,

@@ -181,11 +181,11 @@ def test_load_or_build_index_builds_when_nodes_given(
) -> None:
    with (
        patch(
            "paperless_ai.indexing.load_index_from_storage",
            "llama_index.core.load_index_from_storage",
            side_effect=ValueError("Index not found"),
        ),
        patch(
            "paperless_ai.indexing.VectorStoreIndex",
            "llama_index.core.VectorStoreIndex",
            return_value=MagicMock(),
        ) as mock_index_cls,
        patch(
@@ -206,7 +206,7 @@ def test_load_or_build_index_raises_exception_when_no_nodes(
) -> None:
    with (
        patch(
            "paperless_ai.indexing.load_index_from_storage",
            "llama_index.core.load_index_from_storage",
            side_effect=ValueError("Index not found"),
        ),
        patch(
@@ -225,11 +225,11 @@ def test_load_or_build_index_succeeds_when_nodes_given(
) -> None:
    with (
        patch(
            "paperless_ai.indexing.load_index_from_storage",
            "llama_index.core.load_index_from_storage",
            side_effect=ValueError("Index not found"),
        ),
        patch(
            "paperless_ai.indexing.VectorStoreIndex",
            "llama_index.core.VectorStoreIndex",
            return_value=MagicMock(),
        ) as mock_index_cls,
        patch(
@@ -334,7 +334,7 @@ def test_query_similar_documents(
        patch(
            "paperless_ai.indexing.vector_store_file_exists",
        ) as mock_vector_store_exists,
        patch("paperless_ai.indexing.VectorIndexRetriever") as mock_retriever_cls,
        patch("llama_index.core.retrievers.VectorIndexRetriever") as mock_retriever_cls,
        patch("paperless_ai.indexing.Document.objects.filter") as mock_filter,
    ):
        mock_storage.return_value = MagicMock()

@@ -45,7 +45,7 @@ def test_stream_chat_with_one_document_full_content(mock_document) -> None:
        patch("paperless_ai.chat.AIClient") as mock_client_cls,
        patch("paperless_ai.chat.load_or_build_index") as mock_load_index,
        patch(
            "paperless_ai.chat.RetrieverQueryEngine.from_args",
            "llama_index.core.query_engine.RetrieverQueryEngine.from_args",
        ) as mock_query_engine_cls,
    ):
        mock_client = MagicMock()
@@ -76,7 +76,7 @@ def test_stream_chat_with_multiple_documents_retrieval(patch_embed_nodes) -> Non
        patch("paperless_ai.chat.AIClient") as mock_client_cls,
        patch("paperless_ai.chat.load_or_build_index") as mock_load_index,
        patch(
            "paperless_ai.chat.RetrieverQueryEngine.from_args",
            "llama_index.core.query_engine.RetrieverQueryEngine.from_args",
        ) as mock_query_engine_cls,
        patch.object(VectorStoreIndex, "as_retriever") as mock_as_retriever,
    ):

@@ -18,13 +18,13 @@ def mock_ai_config():

@pytest.fixture
def mock_ollama_llm():
    with patch("paperless_ai.client.Ollama") as MockOllama:
    with patch("llama_index.llms.ollama.Ollama") as MockOllama:
        yield MockOllama


@pytest.fixture
def mock_openai_llm():
    with patch("paperless_ai.client.OpenAI") as MockOpenAI:
    with patch("llama_index.llms.openai.OpenAI") as MockOpenAI:
        yield MockOpenAI


@@ -67,7 +67,7 @@ def test_get_embedding_model_openai(mock_ai_config):
    mock_ai_config.return_value.llm_api_key = "test_api_key"
    mock_ai_config.return_value.llm_endpoint = "http://test-url"

    with patch("paperless_ai.embedding.OpenAIEmbedding") as MockOpenAIEmbedding:
    with patch("llama_index.embeddings.openai.OpenAIEmbedding") as MockOpenAIEmbedding:
        model = get_embedding_model()
        MockOpenAIEmbedding.assert_called_once_with(
            model="text-embedding-3-small",
@@ -84,7 +84,7 @@ def test_get_embedding_model_huggingface(mock_ai_config):
    )

    with patch(
        "paperless_ai.embedding.HuggingFaceEmbedding",
        "llama_index.embeddings.huggingface.HuggingFaceEmbedding",
    ) as MockHuggingFaceEmbedding:
        model = get_embedding_model()
        MockHuggingFaceEmbedding.assert_called_once_with(

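These fixture changes are the test-side consequence of the lazy imports: once Ollama, OpenAI and the embedding classes are no longer bound as attributes of paperless_ai.client or paperless_ai.embedding, patching those module attributes intercepts nothing, so the patch target moves to the defining llama_index module, which the deferred import resolves at call time. In sketch form (this assumes llama_index is installed, since patch imports the target module):

    from unittest.mock import patch

    # Before: the top-level import re-bound the name, so this worked:
    #     patch("paperless_ai.client.Ollama")
    # After the lazy-import change there is no such module attribute to
    # replace; the patch must target where the class is actually defined:
    with patch("llama_index.llms.ollama.Ollama") as MockOllama:
        ...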
69 uv.lock generated
@@ -1748,6 +1748,73 @@ wheels = [
{ url = "https://files.pythonhosted.org/packages/0e/61/66938bbb5fc52dbdf84594873d5b51fb1f7c7794e9c0f5bd885f30bc507b/idna-3.11-py3-none-any.whl", hash = "sha256:771a87f49d9defaf64091e6e6fe9c18d4833f140bd19464795bc32d966ca37ea", size = 71008, upload-time = "2025-10-12T14:55:18.883Z" },
]
[[package]]
name = "ijson"
version = "3.5.0"
source = { registry = "https://pypi.org/simple" }
sdist = { url = "https://files.pythonhosted.org/packages/f4/57/60d1a6a512f2f0508d0bc8b4f1cc5616fd3196619b66bd6a01f9155a1292/ijson-3.5.0.tar.gz", hash = "sha256:94688760720e3f5212731b3cb8d30267f9a045fb38fb3870254e7b9504246f31", size = 68658, upload-time = "2026-02-24T03:58:30.974Z" }
wheels = [
{ url = "https://files.pythonhosted.org/packages/65/da/644343198abca5e0f6e2486063f8d8f3c443ca0ef5e5c890e51ef6032e33/ijson-3.5.0-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:5616311404b858d32740b7ad8b9a799c62165f5ecb85d0a8ed16c21665a90533", size = 88964, upload-time = "2026-02-24T03:56:53.099Z" },
{ url = "https://files.pythonhosted.org/packages/5b/63/8621190aa2baf96156dfd4c632b6aa9f1464411e50b98750c09acc0505ea/ijson-3.5.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:e9733f94029dd41702d573ef64752e2556e72aea14623d6dbb7a44ca1ccf30fd", size = 60582, upload-time = "2026-02-24T03:56:54.261Z" },
{ url = "https://files.pythonhosted.org/packages/20/31/6a3f041fdd17dacff33b7d7d3ba3df6dca48740108340c6042f974b2ad20/ijson-3.5.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:db8398c6721b98412a4f618da8022550c8b9c5d9214040646071b5deb4d4a393", size = 60632, upload-time = "2026-02-24T03:56:55.159Z" },
{ url = "https://files.pythonhosted.org/packages/e4/68/474541998abbdecfd46a744536878335de89aceb9f085bff1aaf35575ceb/ijson-3.5.0-cp311-cp311-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:c061314845c08163b1784b6076ea5f075372461a32e6916f4e5f211fd4130b64", size = 131988, upload-time = "2026-02-24T03:56:56.35Z" },
{ url = "https://files.pythonhosted.org/packages/cd/32/e05ff8b72a44fe9d192f41c5dcbc35cfa87efc280cdbfe539ffaf4a7535e/ijson-3.5.0-cp311-cp311-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:1111a1c5ac79119c5d6e836f900c1a53844b50a18af38311baa6bb61e2645aca", size = 138669, upload-time = "2026-02-24T03:56:57.555Z" },
{ url = "https://files.pythonhosted.org/packages/49/b5/955a83b031102c7a602e2c06d03aff0a0e584212f09edb94ccc754d203ac/ijson-3.5.0-cp311-cp311-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:1e74aff8c681c24002b61b1822f9511d4c384f324f7dbc08c78538e01fdc9fcb", size = 135093, upload-time = "2026-02-24T03:56:59.267Z" },
{ url = "https://files.pythonhosted.org/packages/e8/f2/30250cfcb4d2766669b31f6732689aab2bb91de426a15a3ebe482df7ee48/ijson-3.5.0-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:739a7229b1b0cc5f7e2785a6e7a5fc915e850d3fed9588d0e89a09f88a417253", size = 138715, upload-time = "2026-02-24T03:57:00.491Z" },
{ url = "https://files.pythonhosted.org/packages/a2/05/785a145d7e75e04e04480d59b6323cd4b1d9013a6cd8643fa635fbc93490/ijson-3.5.0-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:ef88712160360cab3ca6471a4e5418243f8b267cf1fe1620879d1b5558babc71", size = 133194, upload-time = "2026-02-24T03:57:01.759Z" },
{ url = "https://files.pythonhosted.org/packages/14/eb/80d6f8a748dead4034cea0939494a67d10ccf88d6413bf6e860393139676/ijson-3.5.0-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:6ca0d1b6b5f8166a6248f4309497585fb8553b04bc8179a0260fad636cfdb798", size = 135588, upload-time = "2026-02-24T03:57:03.131Z" },
{ url = "https://files.pythonhosted.org/packages/aa/17/9c63c7688025f3a8c47ea717b8306649c8c7244e49e20a2be4e3515dc75c/ijson-3.5.0-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:1ebefbe149a6106cc848a3eaf536af51a9b5ccc9082de801389f152dba6ab755", size = 88536, upload-time = "2026-02-24T03:57:06.809Z" },
{ url = "https://files.pythonhosted.org/packages/6f/dd/e15c2400244c117b06585452ebc63ae254f5a6964f712306afd1422daae0/ijson-3.5.0-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:19e30d9f00f82e64de689c0b8651b9cfed879c184b139d7e1ea5030cec401c21", size = 60499, upload-time = "2026-02-24T03:57:09.155Z" },
{ url = "https://files.pythonhosted.org/packages/77/a9/bf4fe3538a0c965f16b406f180a06105b875da83f0743e36246be64ef550/ijson-3.5.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:a04a33ee78a6f27b9b8528c1ca3c207b1df3b8b867a4cf2fcc4109986f35c227", size = 60330, upload-time = "2026-02-24T03:57:10.574Z" },
{ url = "https://files.pythonhosted.org/packages/31/76/6f91bdb019dd978fce1bc5ea1cd620cfc096d258126c91db2c03a20a7f34/ijson-3.5.0-cp312-cp312-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:7d48dc2984af02eb3c56edfb3f13b3f62f2f3e4fe36f058c8cfc75d93adf4fed", size = 138977, upload-time = "2026-02-24T03:57:11.932Z" },
{ url = "https://files.pythonhosted.org/packages/11/be/bbc983059e48a54b0121ee60042979faed7674490bbe7b2c41560db3f436/ijson-3.5.0-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:f1e73a44844d9adbca9cf2c4132cd875933e83f3d4b23881fcaf82be83644c7d", size = 149785, upload-time = "2026-02-24T03:57:13.255Z" },
{ url = "https://files.pythonhosted.org/packages/6d/81/2fee58f9024a3449aee83edfa7167fb5ccd7e1af2557300e28531bb68e16/ijson-3.5.0-cp312-cp312-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:7389a56b8562a19948bdf1d7bae3a2edc8c7f86fb59834dcb1c4c722818e645a", size = 149729, upload-time = "2026-02-24T03:57:14.191Z" },
{ url = "https://files.pythonhosted.org/packages/c7/56/f1706761fcc096c9d414b3dcd000b1e6e5c24364c21cfba429837f98ee8d/ijson-3.5.0-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:3176f23f8ebec83f374ed0c3b4e5a0c4db7ede54c005864efebbed46da123608", size = 150697, upload-time = "2026-02-24T03:57:15.855Z" },
{ url = "https://files.pythonhosted.org/packages/d9/6e/ee0d9c875a0193b632b3e9ccd1b22a50685fb510256ad57ba483b6529f77/ijson-3.5.0-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:6babd88e508630c6ef86c9bebaaf13bb2fb8ec1d8f8868773a03c20253f599bc", size = 142873, upload-time = "2026-02-24T03:57:16.831Z" },
{ url = "https://files.pythonhosted.org/packages/d2/bf/f9d4399d0e6e3fd615035290a71e97c843f17f329b43638c0a01cf112d73/ijson-3.5.0-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:dc1b3836b174b6db2fa8319f1926fb5445abd195dc963368092103f8579cb8ed", size = 151583, upload-time = "2026-02-24T03:57:17.757Z" },
{ url = "https://files.pythonhosted.org/packages/a2/71/d67e764a712c3590627480643a3b51efcc3afa4ef3cb54ee4c989073c97e/ijson-3.5.0-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:e9cedc10e40dd6023c351ed8bfc7dcfce58204f15c321c3c1546b9c7b12562a4", size = 88544, upload-time = "2026-02-24T03:57:21.293Z" },
{ url = "https://files.pythonhosted.org/packages/1a/39/f1c299371686153fa3cf5c0736b96247a87a1bee1b7145e6d21f359c505a/ijson-3.5.0-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:3647649f782ee06c97490b43680371186651f3f69bebe64c6083ee7615d185e5", size = 60495, upload-time = "2026-02-24T03:57:22.501Z" },
{ url = "https://files.pythonhosted.org/packages/16/94/b1438e204d75e01541bebe3e668fe3e68612d210e9931ae1611062dd0a56/ijson-3.5.0-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:90e74be1dce05fce73451c62d1118671f78f47c9f6be3991c82b91063bf01fc9", size = 60325, upload-time = "2026-02-24T03:57:23.332Z" },
{ url = "https://files.pythonhosted.org/packages/30/e2/4aa9c116fa86cc8b0f574f3c3a47409edc1cd4face05d0e589a5a176b05d/ijson-3.5.0-cp313-cp313-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:78e9ad73e7be2dd80627504bd5cbf512348c55ce2c06e362ed7683b5220e8568", size = 138774, upload-time = "2026-02-24T03:57:24.683Z" },
{ url = "https://files.pythonhosted.org/packages/d2/d2/738b88752a70c3be1505faa4dcd7110668c2712e582a6a36488ed1e295d4/ijson-3.5.0-cp313-cp313-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:9577449313cc94be89a4fe4b3e716c65f09cc19636d5a6b2861c4e80dddebd58", size = 149820, upload-time = "2026-02-24T03:57:26.062Z" },
{ url = "https://files.pythonhosted.org/packages/ed/df/0b3ab9f393ca8f72ea03bc896ba9fdc987e90ae08cdb51c32a4ee0c14d5e/ijson-3.5.0-cp313-cp313-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:3e4c1178fb50aff5f5701a30a5152ead82a14e189ce0f6102fa1b5f10b2f54ff", size = 149747, upload-time = "2026-02-24T03:57:27.308Z" },
{ url = "https://files.pythonhosted.org/packages/cc/a3/b0037119f75131b78cb00acc2657b1a9d0435475f1f2c5f8f5a170b66b9c/ijson-3.5.0-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:0eb402ab026ffb37a918d75af2b7260fe6cfbce13232cc83728a714dd30bd81d", size = 151027, upload-time = "2026-02-24T03:57:28.522Z" },
{ url = "https://files.pythonhosted.org/packages/22/a0/cb344de1862bf09d8f769c9d25c944078c87dd59a1b496feec5ad96309a4/ijson-3.5.0-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:5b08ee08355f9f729612a8eb9bf69cc14f9310c3b2a487c6f1c3c65d85216ec4", size = 142996, upload-time = "2026-02-24T03:57:29.774Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/ca/32/a8ffd67182e02ea61f70f62daf43ded4fa8a830a2520a851d2782460aba8/ijson-3.5.0-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:bda62b6d48442903e7bf56152108afb7f0f1293c2b9bef2f2c369defea76ab18", size = 152068, upload-time = "2026-02-24T03:57:30.969Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/42/65/13e2492d17e19a2084523e18716dc2809159f2287fd2700c735f311e76c4/ijson-3.5.0-cp313-cp313t-macosx_10_13_universal2.whl", hash = "sha256:4d4b0cd676b8c842f7648c1a783448fac5cd3b98289abd83711b3e275e143524", size = 93019, upload-time = "2026-02-24T03:57:33.976Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/33/92/483fc97ece0c3f1cecabf48f6a7a36e89d19369eec462faaeaa34c788992/ijson-3.5.0-cp313-cp313t-macosx_10_13_x86_64.whl", hash = "sha256:252dec3680a48bb82d475e36b4ae1b3a9d7eb690b951bb98a76c5fe519e30188", size = 62714, upload-time = "2026-02-24T03:57:34.819Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/4b/88/793fe020a0fe9d9eed4c285cf4a5cfdb0a935708b3bde0d72f35c794b513/ijson-3.5.0-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:aa1b5dca97d323931fde2501172337384c958914d81a9dac7f00f0d4bfc76bc7", size = 62460, upload-time = "2026-02-24T03:57:35.874Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/51/69/f1a2690aa8d4df1f4e262b385e65a933ffdc250b091531bac9a449c19e16/ijson-3.5.0-cp313-cp313t-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:7a5ec7fd86d606094bba6f6f8f87494897102fa4584ef653f3005c51a784c320", size = 199273, upload-time = "2026-02-24T03:57:37.07Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/ea/a2/f1346d5299e79b988ab472dc773d5381ec2d57c23cb2f1af3ede4a810e62/ijson-3.5.0-cp313-cp313t-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:009f41443e1521847701c6d87fa3923c0b1961be3c7e7de90947c8cb92ea7c44", size = 216884, upload-time = "2026-02-24T03:57:38.346Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/28/3c/8b637e869be87799e6c2c3c275a30a546f086b1aed77e2b7f11512168c5a/ijson-3.5.0-cp313-cp313t-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:e4c3651d1f9fe2839a93fdf8fd1d5ca3a54975349894249f3b1b572bcc4bd577", size = 207306, upload-time = "2026-02-24T03:57:39.718Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/7f/7c/18b1c1df6951ca056782d7580ec40cea4ff9a27a0947d92640d1cc8c4ae3/ijson-3.5.0-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = "sha256:945b7abcfcfeae2cde17d8d900870f03536494245dda7ad4f8d056faa303256c", size = 211364, upload-time = "2026-02-24T03:57:40.953Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/f3/55/e795812e82851574a9dba8a53fde045378f531ef14110c6fb55dbd23b443/ijson-3.5.0-cp313-cp313t-musllinux_1_2_i686.whl", hash = "sha256:0574b0a841ff97495c13e9d7260fbf3d85358b061f540c52a123db9dbbaa2ed6", size = 200608, upload-time = "2026-02-24T03:57:42.272Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/5c/cd/013c85b4749b57a4cb4c2670014d1b32b8db4ab1a7be92ea7aeb5d7fe7b5/ijson-3.5.0-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:f969ffb2b89c5cdf686652d7fb66252bc72126fa54d416317411497276056a18", size = 205127, upload-time = "2026-02-24T03:57:43.286Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/7a/93/0868efe753dc1df80cc405cf0c1f2527a6991643607c741bff8dcb899b3b/ijson-3.5.0-cp314-cp314-macosx_10_15_universal2.whl", hash = "sha256:25a5a6b2045c90bb83061df27cfa43572afa43ba9408611d7bfe237c20a731a9", size = 89094, upload-time = "2026-02-24T03:57:46.115Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/24/94/fd5a832a0df52ef5e4e740f14ac8640725d61034a1b0c561e8b5fb424706/ijson-3.5.0-cp314-cp314-macosx_10_15_x86_64.whl", hash = "sha256:8976c54c0b864bc82b951bae06567566ac77ef63b90a773a69cd73aab47f4f4f", size = 60715, upload-time = "2026-02-24T03:57:47.552Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/70/79/1b9a90af5732491f9eec751ee211b86b11011e1158c555c06576d52c3919/ijson-3.5.0-cp314-cp314-macosx_11_0_arm64.whl", hash = "sha256:859eb2038f7f1b0664df4241957694cc35e6295992d71c98659b22c69b3cbc10", size = 60638, upload-time = "2026-02-24T03:57:48.428Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/23/6f/2c551ea980fe56f68710a8d5389cfbd015fc45aaafd17c3c52c346db6aa1/ijson-3.5.0-cp314-cp314-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:c911aa02991c7c0d3639b6619b93a93210ff1e7f58bf7225d613abea10adc78e", size = 140667, upload-time = "2026-02-24T03:57:49.314Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/25/0e/27b887879ba6a5bc29766e3c5af4942638c952220fd63e1e442674f7883a/ijson-3.5.0-cp314-cp314-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:903cbdc350173605220edc19796fbea9b2203c8b3951fb7335abfa8ed37afda8", size = 149850, upload-time = "2026-02-24T03:57:50.329Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/da/1e/23e10e1bc04bf31193b21e2960dce14b17dbd5d0c62204e8401c59d62c08/ijson-3.5.0-cp314-cp314-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:a4549d96ded5b8efa71639b2160235415f6bdb8c83367615e2dbabcb72755c33", size = 149206, upload-time = "2026-02-24T03:57:51.261Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/8e/90/e552f6495063b235cf7fa2c592f6597c057077195e517b842a0374fd470c/ijson-3.5.0-cp314-cp314-musllinux_1_2_aarch64.whl", hash = "sha256:6b2dcf6349e6042d83f3f8c39ce84823cf7577eba25bac5aae5e39bbbbbe9c1c", size = 150438, upload-time = "2026-02-24T03:57:52.198Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/5c/18/45bf8f297c41b42a1c231d261141097babd953d2c28a07be57ae4c3a1a02/ijson-3.5.0-cp314-cp314-musllinux_1_2_i686.whl", hash = "sha256:e44af39e6f8a17e5627dcd89715d8279bf3474153ff99aae031a936e5c5572e5", size = 144369, upload-time = "2026-02-24T03:57:53.22Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/9b/3a/deb9772bb2c0cead7ad64f00c3598eec9072bdf511818e70e2c512eeabbe/ijson-3.5.0-cp314-cp314-musllinux_1_2_x86_64.whl", hash = "sha256:9260332304b7e7828db56d43f08fc970a3ab741bf84ff10189361ea1b60c395b", size = 151352, upload-time = "2026-02-24T03:57:54.375Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/9f/d9/86f7fac35e0835faa188085ae0579e813493d5261ce056484015ad533445/ijson-3.5.0-cp314-cp314t-macosx_10_15_universal2.whl", hash = "sha256:2ea4b676ec98e374c1df400a47929859e4fa1239274339024df4716e802aa7e4", size = 93069, upload-time = "2026-02-24T03:57:57.849Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/33/d2/e7366ed9c6e60228d35baf4404bac01a126e7775ea8ce57f560125ed190a/ijson-3.5.0-cp314-cp314t-macosx_10_15_x86_64.whl", hash = "sha256:014586eec043e23c80be9a923c56c3a0920a0f1f7d17478ce7bc20ba443968ef", size = 62767, upload-time = "2026-02-24T03:57:58.758Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/35/8b/3e703e8cc4b3ada79f13b28070b51d9550c578f76d1968657905857b2ddd/ijson-3.5.0-cp314-cp314t-macosx_11_0_arm64.whl", hash = "sha256:d5b8b886b0248652d437f66e7c5ac318bbdcb2c7137a7e5327a68ca00b286f5f", size = 62467, upload-time = "2026-02-24T03:58:00.261Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/21/42/0c91af32c1ee8a957fdac2e051b5780756d05fd34e4b60d94a08d51bac1d/ijson-3.5.0-cp314-cp314t-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:498fd46ae2349297e43acf97cdc421e711dbd7198418677259393d2acdc62d78", size = 200447, upload-time = "2026-02-24T03:58:01.591Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/f9/80/796ea0e391b7e2d45c5b1b451734bba03f81c2984cf955ea5eaa6c4920ad/ijson-3.5.0-cp314-cp314t-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:22a51b4f9b81f12793731cf226266d1de2112c3c04ba4a04117ad4e466897e05", size = 217820, upload-time = "2026-02-24T03:58:02.598Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/38/14/52b6613fdda4078c62eb5b4fe3efc724ddc55a4ad524c93de51830107aa3/ijson-3.5.0-cp314-cp314t-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:9636c710dc4ac4a281baa266a64f323b4cc165cec26836af702c44328b59a515", size = 208310, upload-time = "2026-02-24T03:58:04.759Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/6a/ad/8b3105a78774fd4a65e534a21d975ef3a77e189489fe3029ebcaeba5e243/ijson-3.5.0-cp314-cp314t-musllinux_1_2_aarch64.whl", hash = "sha256:f7168a39e8211107666d71b25693fd1b2bac0b33735ef744114c403c6cac21e1", size = 211843, upload-time = "2026-02-24T03:58:05.836Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/36/ab/a2739f6072d6e1160581bc3ed32da614c8cced023dcd519d9c5fa66e0425/ijson-3.5.0-cp314-cp314t-musllinux_1_2_i686.whl", hash = "sha256:8696454245415bc617ab03b0dc3ae4c86987df5dc6a90bad378fe72c5409d89e", size = 200906, upload-time = "2026-02-24T03:58:07.788Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/6d/5e/e06c2de3c3d4a9cfb655c1ad08a68fb72838d271072cdd3196576ac4431a/ijson-3.5.0-cp314-cp314t-musllinux_1_2_x86_64.whl", hash = "sha256:c21bfb61f71f191565885bf1bc29e0a186292d866b4880637b833848360bdc1b", size = 205495, upload-time = "2026-02-24T03:58:09.163Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/d9/3b/d31ecfa63a218978617446159f3d77aab2417a5bd2885c425b176353ff78/ijson-3.5.0-pp311-pypy311_pp73-macosx_10_15_x86_64.whl", hash = "sha256:d64c624da0e9d692d6eb0ff63a79656b59d76bf80773a17c5b0f835e4e8ef627", size = 57715, upload-time = "2026-02-24T03:58:24.545Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/30/51/b170e646d378e8cccf9637c05edb5419b00c2c4df64b0258c3af5355608e/ijson-3.5.0-pp311-pypy311_pp73-macosx_11_0_arm64.whl", hash = "sha256:876f7df73b7e0d6474f9caa729b9cdbfc8e76de9075a4887dfd689e29e85c4ca", size = 57205, upload-time = "2026-02-24T03:58:25.681Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/ef/83/44dbd0231b0a8c6c14d27473d10c4e27dfbce7d5d9a833c79e3e6c33eb40/ijson-3.5.0-pp311-pypy311_pp73-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:e7dbff2c8d9027809b0cde663df44f3210da10ea377121d42896fb6ee405dd31", size = 71229, upload-time = "2026-02-24T03:58:27.103Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/c8/98/cf84048b7c6cec888826e696a31f45bee7ebcac15e532b6be1fc4c2c9608/ijson-3.5.0-pp311-pypy311_pp73-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:4217a1edc278660679e1197c83a1a2a2d367792bfbb2a3279577f4b59b93730d", size = 71217, upload-time = "2026-02-24T03:58:28.021Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/3c/0a/e34c729a87ff67dc6540f6bcc896626158e691d433ab57db0086d73decd2/ijson-3.5.0-pp311-pypy311_pp73-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:04f0fc740311388ee745ba55a12292b722d6f52000b11acbb913982ba5fbdf87", size = 68618, upload-time = "2026-02-24T03:58:28.918Z" },
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "imagehash"
|
||||
version = "4.3.2"
|
||||
@@ -2751,6 +2818,7 @@ dependencies = [
|
||||
{ name = "flower", marker = "sys_platform == 'darwin' or sys_platform == 'linux'" },
|
||||
{ name = "gotenberg-client", marker = "sys_platform == 'darwin' or sys_platform == 'linux'" },
|
||||
{ name = "httpx-oauth", marker = "sys_platform == 'darwin' or sys_platform == 'linux'" },
|
||||
{ name = "ijson", marker = "sys_platform == 'darwin' or sys_platform == 'linux'" },
|
||||
{ name = "imap-tools", marker = "sys_platform == 'darwin' or sys_platform == 'linux'" },
|
||||
{ name = "jinja2", marker = "sys_platform == 'darwin' or sys_platform == 'linux'" },
|
||||
{ name = "langdetect", marker = "sys_platform == 'darwin' or sys_platform == 'linux'" },
|
||||
@@ -2898,6 +2966,7 @@ requires-dist = [
|
||||
{ name = "gotenberg-client", specifier = "~=0.13.1" },
|
||||
{ name = "granian", extras = ["uvloop"], marker = "extra == 'webserver'", specifier = "~=2.7.0" },
|
||||
{ name = "httpx-oauth", specifier = "~=0.16" },
|
||||
{ name = "ijson", specifier = ">=3.2" },
|
||||
{ name = "imap-tools", specifier = "~=1.11.0" },
|
||||
{ name = "jinja2", specifier = "~=3.1.5" },
|
||||
{ name = "langdetect", specifier = "~=1.0.9" },